In [13]:
import argparse
import os
import shutil
import time
import math
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data as data_utils
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import cv2
In [30]:
"""
ResNet defination: https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
"""
def conv3x3(in_planes, out_planes, stride=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class G_model(nn.Module):
def __init__(self, block, layers, num_common_blocks=4, num_albedo_blocks=4, num_shading_blocks=4, num_classes=1000):
"""
resnet18 begin
"""
self.inplanes = 64
super(G_model, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
# self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
# self.avgpool = nn.AvgPool2d(7)
# self.fc = nn.Linear(512 * block.expansion, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
"""
resnet18 end
"""
# self.myfc = nn.Linear(num_classes, 2)
"""
input data
"""
"""
conv or upsample to 8M:
from maxpool output(8M) and layer3 output(M)
"""
self.num_common_channels = 64
self.upsample1 = nn.Conv2d(64, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.upsample1_bn = nn.BatchNorm2d(self.num_common_channels)
self.upsample1_relu = nn.ReLU(inplace=True)
self.upsample2 = nn.ConvTranspose2d(256, 256, kernel_size=3, padding=1, bias=False)
self.upsample2_conv = nn.Conv2d(256, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.upsample2_bn = nn.BatchNorm2d(self.num_common_channels)
self.upsample2_relu = nn.ReLU(inplace=True)
"""
intrinsic net: common part
"""
self.common_layer = self._make_layer(block, self.num_common_channels, num_common_blocks)
"""
albedo part
"""
self.albedo_layer = self._make_layer(block, self.num_common_channels, num_albedo_blocks)
self.albedo_output = nn.ConvTranspose2d(self.num_common_channels, 3, kernel_size=8, padding=4, bias=True)
"""
shading part
"""
self.shading_layer = self._make_layer(block, self.num_common_channels, num_shading_blocks)
self.shading_output = nn.ConvTranspose2d(self.num_common_channels, 3, kernel_size=8, padding=4, bias=True)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
"""
resnet18 begin
"""
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
# x = self.avgpool(x)
# x = x.view(x.size(0), -1)
# x = self.fc(x)
# x = self.myfc(x)
"""
resnet18 end
"""
"""
finetune part
albedo
shading
"""
x = self.common_layer(x)
albedo = self.albedo_layer(x)
shading = self.shading_layer(x)
albedo = self.albedo_output(albedo)
shading = self.shading_output(shading)
return albedo, shading
class D_model(nn.Module):
def __init__(self):
self.conv1 = nn.Conv2d(64, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.relu1 = nn.ReLU(inplace=True)
self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv2 = nn.Conv2d(128, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.relu2 = nn.ReLU(inplace=True)
self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv3 = nn.Conv2d(256, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.relu3 = nn.ReLU(inplace=True)
self.pool3 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv4 = nn.Conv2d(256, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.relu4 = nn.ReLU(inplace=True)
self.pool4 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv5 = nn.Conv2d(512, self.num_common_channels, kernel_size=5, padding=2, bias=False)
self.relu5 = nn.ReLU(inplace=True)
self.pool5 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.fc = nn.Linear(256//32, 1)
def forward(self, x):
x = self.conv1(x)
x = self.relu1(x)
x = self.pool1(x)
x = self.conv2(x)
x = self.relu2(x)
x = self.pool2(x)
x = self.conv3(x)
x = self.relu3(x)
x = self.pool3(x)
x = self.conv4(x)
x = self.relu4(x)
x = self.pool4(x)
x = self.conv5(x)
x = self.relu5(x)
x = self.pool5(x)
x = self.fc(x)
return x
pretrained_dict = torch.load('/Users/albertxavier/.torch/models/resnet18-5c106cde.pth')
G = G_model(BasicBlock, [2, 2, 2, 2])
G_dict = G.state_dict()
# 1. filter out unnecessary keys
pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in G_dict}
# 2. overwrite entries in the existing state dict
G_dict.update(pretrained_dict)
# 3. load the new state dict
G.load_state_dict(pretrained_dict)
D = D_model()
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-30-c2392e5eed4c> in <module>()
223 G_dict.update(pretrained_dict)
224 # 3. load the new state dict
--> 225 G.load_state_dict(pretrained_dict)
226
227 D = D_model()
/Users/albertxavier/anaconda/lib/python2.7/site-packages/torch/nn/modules/module.pyc in load_state_dict(self, state_dict)
317 missing = set(own_state.keys()) - set(state_dict.keys())
318 if len(missing) > 0:
--> 319 raise KeyError('missing keys in state_dict: "{}"'.format(missing))
320
321 def parameters(self, memo=None):
KeyError: 'missing keys in state_dict: "set([\'upsample1_bn.running_var\', \'shading_layer.1.bn2.running_mean\', \'common_layer.0.downsample.0.weight\', \'common_layer.0.bn1.running_var\', \'shading_layer.1.bn1.running_mean\', \'common_layer.1.bn1.running_var\', \'common_layer.3.bn2.bias\', \'upsample1_bn.weight\', \'shading_layer.0.conv1.weight\', \'common_layer.1.bn2.running_var\', \'albedo_layer.2.bn2.running_mean\', \'shading_layer.1.bn1.running_var\', \'common_layer.3.bn1.running_var\', \'common_layer.0.downsample.1.running_var\', \'common_layer.0.conv2.weight\', \'albedo_layer.1.bn2.weight\', \'common_layer.3.conv1.weight\', \'shading_layer.2.bn2.bias\', \'shading_layer.2.bn1.running_mean\', \'common_layer.0.bn2.bias\', \'albedo_layer.2.bn2.weight\', \'albedo_layer.2.conv2.weight\', \'common_layer.3.conv2.weight\', \'albedo_layer.3.bn1.running_mean\', \'shading_layer.2.bn1.running_var\', \'shading_layer.2.bn2.running_var\', \'common_layer.1.bn2.running_mean\', \'albedo_layer.2.bn1.bias\', \'common_layer.2.bn1.weight\', \'albedo_layer.0.bn2.weight\', \'upsample1.weight\', \'common_layer.0.bn1.weight\', \'common_layer.0.downsample.1.running_mean\', \'shading_layer.2.conv1.weight\', \'albedo_layer.1.bn2.bias\', \'shading_layer.2.bn1.weight\', \'common_layer.0.downsample.1.weight\', \'common_layer.2.conv1.weight\', \'common_layer.2.bn2.weight\', \'shading_layer.3.conv1.weight\', \'common_layer.1.bn1.bias\', \'albedo_layer.1.bn1.weight\', \'albedo_layer.3.bn1.bias\', \'common_layer.0.bn2.running_var\', \'common_layer.1.conv2.weight\', \'albedo_output.weight\', \'shading_layer.3.bn2.weight\', \'upsample2_bn.bias\', \'albedo_layer.1.bn1.bias\', \'albedo_layer.2.bn1.running_var\', \'common_layer.1.bn1.weight\', \'shading_layer.0.bn1.running_mean\', \'common_layer.0.bn1.running_mean\', \'common_layer.3.bn1.bias\', \'shading_layer.2.bn2.weight\', \'common_layer.2.bn2.running_mean\', \'shading_layer.3.bn1.bias\', \'albedo_layer.1.bn1.running_mean\', \'common_layer.0.bn2.running_mean\', \'shading_layer.1.conv2.weight\', \'albedo_layer.1.bn2.running_mean\', \'common_layer.0.bn2.weight\', \'albedo_layer.0.conv1.weight\', \'common_layer.3.bn2.weight\', \'albedo_layer.0.bn2.running_var\', \'albedo_layer.0.bn1.running_var\', \'shading_layer.1.bn1.bias\', \'common_layer.0.downsample.1.bias\', \'common_layer.2.bn1.running_mean\', \'albedo_layer.0.bn2.running_mean\', \'shading_layer.3.conv2.weight\', \'shading_layer.1.bn2.running_var\', \'albedo_layer.3.conv1.weight\', \'shading_layer.3.bn1.running_mean\', \'upsample2_bn.running_mean\', \'common_layer.2.bn2.running_var\', \'common_layer.1.conv1.weight\', \'common_layer.2.bn1.running_var\', \'albedo_layer.0.bn1.weight\', \'common_layer.1.bn1.running_mean\', \'shading_layer.3.bn2.running_mean\', \'albedo_layer.3.bn2.running_var\', \'albedo_layer.2.bn2.running_var\', \'shading_layer.3.bn1.weight\', \'shading_layer.0.bn2.running_mean\', \'shading_layer.1.bn2.bias\', \'shading_layer.2.bn2.running_mean\', \'common_layer.2.conv2.weight\', \'shading_layer.0.bn1.bias\', \'upsample1_bn.bias\', \'albedo_layer.2.conv1.weight\', \'albedo_layer.0.bn2.bias\', \'upsample2_conv.weight\', \'shading_layer.0.bn2.weight\', \'albedo_layer.0.bn1.running_mean\', \'common_layer.3.bn1.running_mean\', \'albedo_layer.3.bn2.weight\', \'common_layer.3.bn1.weight\', \'albedo_layer.3.conv2.weight\', \'common_layer.2.bn2.bias\', \'albedo_layer.2.bn2.bias\', \'albedo_output.bias\', \'common_layer.3.bn2.running_var\', \'shading_layer.2.bn1.bias\', \'common_layer.1.bn2.weight\', \'common_layer.0.bn1.bias\', \'shading_layer.1.bn1.weight\', \'upsample2_bn.weight\', \'albedo_layer.0.conv2.weight\', \'common_layer.2.bn1.bias\', \'shading_output.weight\', \'upsample2.weight\', \'albedo_layer.1.bn1.running_var\', \'albedo_layer.0.bn1.bias\', \'albedo_layer.3.bn1.weight\', \'albedo_layer.1.bn2.running_var\', \'shading_layer.0.conv2.weight\', \'common_layer.1.bn2.bias\', \'albedo_layer.2.bn1.running_mean\', \'albedo_layer.1.conv1.weight\', \'common_layer.3.bn2.running_mean\', \'albedo_layer.3.bn1.running_var\', \'upsample1_bn.running_mean\', \'shading_layer.0.bn1.running_var\', \'albedo_layer.1.conv2.weight\', \'albedo_layer.3.bn2.running_mean\', \'shading_layer.0.bn1.weight\', \'shading_layer.0.bn2.running_var\', \'shading_layer.1.conv1.weight\', \'shading_layer.2.conv2.weight\', \'shading_layer.3.bn2.bias\', \'shading_output.bias\', \'shading_layer.3.bn2.running_var\', \'albedo_layer.2.bn1.weight\', \'albedo_layer.3.bn2.bias\', \'upsample2_bn.running_var\', \'shading_layer.3.bn1.running_var\', \'shading_layer.1.bn2.weight\', \'common_layer.0.conv1.weight\', \'shading_layer.0.bn2.bias\'])"'
In [16]:
# pretrained_resnet18 = torch.load('/Users/albertxavier/.torch/models/resnet18-5c106cde.pth')
# # print pretrained_resnet18
# pretrained_dict = pretrained_resnet18.state_dict()
# G = G_model(BasicBlock, [2, 2, 2, 2])
# G_dict = G.state_dict()
# # 1. filter out unnecessary keys
# pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in G_dict}
# # 2. overwrite entries in the existing state dict
# G_dict.update(pretrained_dict)
# # 3. load the new state dict
# G_dict.load_state_dict(G_dict)
# D = D_model()
---------------------------------------------------------------------------
NameError Traceback (most recent call last)
<ipython-input-16-f9e13d01004a> in <module>()
28 # G_dict.load_state_dict(pretrained_dict)
29
---> 30 D = D_model()
31
NameError: name 'D_model' is not defined
In [ ]:
lr = 0.01
criterion = nn.MSELoss();
G_solver = optim.Adam(G.parameters(), lr=lr)
D_solver = optim.Adam(D.parameters(), lr=lr)
In [138]:
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import os
import glob
%matplotlib inline
def default_loader(path):
return Image.open(path).convert('RGB')
def make_dataset(dir):
images_paths = glob.glob(os.path.join(dir, 'clean', '*', '*.png'))
albedo_paths = images_paths[:]
shading_paths = images_paths[:]
pathes = []
for img_path in images_paths:
sp = img_path.split('/'); sp[-3] = 'albedo'; sp = ['/'] + sp; albedo_path = os.path.join(*sp)
sp = img_path.split('/'); sp[-3] = 'albedo'; sp = ['/'] + sp; shading_path = os.path.join(*sp)
pathes.append((img_path, albedo_path, shading_path))
return pathes
class MyImageFolder(data_utils.Dataset):
def __init__(self, root, transform=None, target_transform=None,
loader=default_loader):
print "dsdas"
imgs = make_dataset(root)
if len(imgs) == 0:
raise(RuntimeError("Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.root = root
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
self.loader = loader
def __getitem__(self, index):
img_path, albedo_path, shading_path = self.imgs[index]
img = self.loader(img_path)
albedo = self.loader(albedo_path)
shading = self.loader(shading_path)
if self.transform is not None: img = self.transform(img)
if self.transform is not None: albedo = self.transform(albedo)
if self.transform is not None: shading = self.transform(shading)
# if self.target_transform is not None:
# target = self.target_transform(target)
return img, albedo, shading
def __len__(self):
return len(self.imgs)
dataset= MyImageFolder('/Volumes/xavier/dataset/sintel/images/',
transforms.Compose(
[transforms.ToTensor()]
))
# print dataset.imgs[3]
# print dataset.imgs[4]
# print dataset.imgs[5]
dataloader= data_utils.DataLoader(dataset,1,True,num_workers=1)
# print dataloader
# for i,(img,albedo,shading) in enumerate(dataloader):
# print '>>> i = ', i
# # print img
# img = img.numpy(); img = np.array(img[0,:,:,:]); img = img.transpose(1,2,0); plt.imshow(img);
# plt.figure(); albedo = albedo.numpy(); albedo = np.array(albedo[0,:,:,:]); albedo = albedo.transpose(1,2,0); plt.imshow(albedo);
# plt.figure(); shading = shading.numpy(); shading = np.array(shading[0,:,:,:]); shading = shading.transpose(1,2,0); plt.imshow(shading);
# break
dsdas
<torch.utils.data.dataloader.DataLoader object at 0x10b633250>
>>> i = 0
In [ ]:
def train_discriminator(discriminator, predict, groundtruth):
discriminator.zero_grad()
outputs = D(groundtruth)
real_loss = criterion(outputs, groundtruth)
real_score = outputs
outputs = discriminator(predict)
fake_loss = criterion(outputs, predict)
fake_score = outputs
d_loss = real_loss + fake_loss
d_loss.backward()
d_optimizer.step()
return d_loss, real_score, fake_score
In [ ]:
def train_generator(generator, discriminator_outputs, groundtruth):
generator.zero_grad()
g_loss = criterion(discriminator_outputs, groundtruth)
g_loss.backward()
g_optimizer.step()
return g_loss
In [ ]:
"""
Train
"""
epoches = 10
train_loader = dataloader
for epoch in range(num_epochs):
for n, (img,ground_truth_albedo,ground_truth_shading) in enumerate(train_loader):
img = Variable(img)
real_label_albedo = Variable(ground_truth_albedo)
predict_albedo, predict_shading = G(img)
d_loss, real_score, fake_score = train_discriminator(D, predict_albedo, ground_truth_albedo)
fake_image = G(img)
output = discriminator(fake_image)
g_loss = train_generator(G, output, ground_truth_albedo)
if (n+1) % 100 == 0:
test_albedo, _ = G(img)
cv2.imwrite("test.png", test_albedo)
In [ ]:
In [3]:
pretrained_resnet18 = models.resnet18(pretrained=True)
# print model.parameters
# print pretrained_resnet18
print pretrained_resnet18.state_dict()
OrderedDict([('conv1.weight',
(0 ,0 ,.,.) =
-1.0419e-02 -6.1356e-03 -1.8098e-03 ... 5.6615e-02 1.7083e-02 -1.2694e-02
1.1083e-02 9.5276e-03 -1.0993e-01 ... -2.7124e-01 -1.2907e-01 3.7424e-03
-6.9434e-03 5.9089e-02 2.9548e-01 ... 5.1972e-01 2.5632e-01 6.3573e-02
... ⋱ ...
-2.7535e-02 1.6045e-02 7.2595e-02 ... -3.3285e-01 -4.2058e-01 -2.5781e-01
3.0613e-02 4.0960e-02 6.2850e-02 ... 4.1384e-01 3.9359e-01 1.6606e-01
-1.3736e-02 -3.6746e-03 -2.4084e-02 ... -1.5070e-01 -8.2230e-02 -5.7828e-03
(0 ,1 ,.,.) =
-1.1397e-02 -2.6619e-02 -3.4641e-02 ... 3.2521e-02 6.6221e-04 -2.5743e-02
4.5687e-02 3.3603e-02 -1.0453e-01 ... -3.1253e-01 -1.6051e-01 -1.2826e-03
-8.3730e-04 9.8420e-02 4.0210e-01 ... 7.0789e-01 3.6887e-01 1.2455e-01
... ⋱ ...
-5.5926e-02 -5.2239e-03 2.7081e-02 ... -4.6178e-01 -5.7080e-01 -3.6552e-01
3.2860e-02 5.5574e-02 9.9670e-02 ... 5.4636e-01 4.8276e-01 1.9867e-01
5.3051e-03 6.6938e-03 -1.7254e-02 ... -1.4822e-01 -7.7248e-02 7.2183e-04
(0 ,2 ,.,.) =
-2.0315e-03 -9.1617e-03 2.1209e-02 ... 8.9177e-02 3.3655e-02 -2.0102e-02
1.5398e-02 -1.8648e-02 -1.2591e-01 ... -2.5342e-01 -1.2980e-01 -2.7975e-02
9.8454e-03 4.9047e-02 2.1699e-01 ... 3.4872e-01 1.0433e-01 1.8413e-02
... ⋱ ...
-2.8356e-02 1.8404e-02 9.8647e-02 ... -1.1740e-01 -2.5760e-01 -1.5451e-01
2.0766e-02 -2.6286e-03 -3.7825e-02 ... 2.4141e-01 2.4345e-01 1.1796e-01
7.4684e-04 7.7677e-04 -1.0050e-02 ... -1.4865e-01 -1.1754e-01 -3.8350e-02
⋮
(1 ,0 ,.,.) =
-4.4154e-03 -4.0645e-03 3.1589e-03 ... -3.7026e-02 -2.5158e-02 -4.7945e-02
5.1310e-02 5.3402e-02 8.0436e-02 ... 1.4480e-01 1.4287e-01 1.2312e-01
-7.3337e-03 2.1755e-03 3.7580e-02 ... 6.1517e-02 8.0324e-02 1.1715e-01
... ⋱ ...
-2.6754e-02 -1.2297e-01 -1.3653e-01 ... -1.4068e-01 -1.1155e-01 -4.9556e-02
2.3524e-02 -1.7288e-02 -1.1122e-02 ... -1.8826e-02 -2.3320e-02 -2.9474e-02
2.8689e-02 2.1659e-02 4.7888e-02 ... 2.5498e-02 3.5346e-02 1.1280e-02
(1 ,1 ,.,.) =
4.6919e-04 1.2153e-02 4.2035e-02 ... 4.6403e-02 4.0423e-02 -1.4439e-02
4.3463e-02 6.8779e-02 1.3268e-01 ... 2.8606e-01 2.6905e-01 2.0935e-01
-5.7621e-02 -2.2642e-02 3.0547e-02 ... 1.3763e-01 1.6538e-01 1.7946e-01
... ⋱ ...
-1.0816e-01 -2.5227e-01 -2.9742e-01 ... -2.8503e-01 -2.1493e-01 -1.0320e-01
4.0709e-02 -3.2771e-02 -6.3450e-02 ... -9.2360e-02 -6.9876e-02 -4.9841e-02
8.2942e-02 8.7580e-02 1.0111e-01 ... 5.2714e-02 6.0968e-02 4.1198e-02
(1 ,2 ,.,.) =
-1.6391e-02 -1.3870e-02 5.2810e-03 ... 4.3698e-02 2.2707e-02 -4.5983e-02
3.3202e-02 4.2014e-02 9.3500e-02 ... 2.6162e-01 2.2970e-01 1.6694e-01
-4.5987e-02 -1.6365e-02 2.6811e-02 ... 1.4951e-01 1.3216e-01 1.3579e-01
... ⋱ ...
-7.2129e-02 -1.8902e-01 -2.3389e-01 ... -1.9038e-01 -1.5609e-01 -7.5974e-02
5.1161e-02 -2.5815e-02 -6.9357e-02 ... -5.8999e-02 -6.1550e-02 -4.4555e-02
1.1174e-01 7.8979e-02 6.5849e-02 ... 3.1617e-02 2.5221e-02 7.4257e-03
⋮
(2 ,0 ,.,.) =
-7.0826e-08 -6.4306e-08 -7.3806e-08 ... -9.8000e-08 -1.0905e-07 -8.3421e-08
-6.1125e-09 2.0613e-09 -8.0922e-09 ... -4.9840e-08 -4.3836e-08 -3.0538e-09
7.1953e-08 7.5616e-08 5.9282e-08 ... -9.7509e-09 -1.0951e-09 4.2442e-08
... ⋱ ...
9.5889e-08 1.0039e-07 7.9817e-08 ... -1.7491e-08 -4.7666e-08 -1.3265e-08
1.2904e-07 1.4762e-07 1.7477e-07 ... 1.3233e-07 1.0628e-07 9.3316e-08
1.2558e-07 1.3644e-07 1.8431e-07 ... 2.1399e-07 1.7710e-07 1.7166e-07
(2 ,1 ,.,.) =
-1.2690e-07 -9.6139e-08 -1.0372e-07 ... -1.1808e-07 -1.3309e-07 -1.0820e-07
-5.7412e-08 -2.5055e-08 -3.0115e-08 ... -7.2922e-08 -6.7022e-08 -2.2574e-08
2.1813e-08 4.8608e-08 3.1222e-08 ... -1.8694e-08 -7.9591e-09 3.9750e-08
... ⋱ ...
5.6013e-08 7.5526e-08 4.4496e-08 ... -4.4128e-08 -5.9930e-08 -1.8247e-08
7.7614e-08 9.8348e-08 1.0455e-07 ... 6.3272e-08 4.1781e-08 4.5901e-08
5.9834e-08 7.1006e-08 9.0437e-08 ... 1.1654e-07 8.7550e-08 9.8837e-08
(2 ,2 ,.,.) =
-4.3810e-08 1.3270e-08 7.8275e-09 ... -5.8804e-09 -2.6217e-08 -1.5649e-08
4.1700e-08 1.0778e-07 1.0946e-07 ... 7.6403e-08 7.1450e-08 9.7615e-08
1.0436e-07 1.6586e-07 1.5933e-07 ... 1.3517e-07 1.3487e-07 1.6449e-07
... ⋱ ...
9.8763e-08 1.5072e-07 1.2547e-07 ... 6.8316e-08 6.8382e-08 1.1367e-07
9.1435e-08 1.3576e-07 1.3793e-07 ... 1.1678e-07 1.1723e-07 1.4394e-07
6.2183e-08 8.8184e-08 1.0456e-07 ... 1.3941e-07 1.3333e-07 1.5844e-07
...
⋮
(61,0 ,.,.) =
-6.1896e-02 -3.0206e-02 1.9225e-02 ... 4.3665e-02 -2.2114e-02 -4.2214e-02
-3.8061e-02 6.0774e-03 4.5797e-02 ... 9.6029e-02 5.9254e-02 2.9958e-02
-2.9672e-02 2.7766e-03 2.0457e-02 ... 5.9828e-02 4.1422e-02 2.3134e-02
... ⋱ ...
1.1916e-02 4.5701e-02 4.4892e-02 ... 4.7419e-02 2.2274e-02 -5.4993e-03
-3.2468e-02 -1.2210e-02 2.2023e-02 ... 5.8061e-02 -7.5033e-03 -5.9736e-02
-4.3314e-02 -2.8162e-02 -5.9126e-03 ... 8.8460e-02 8.4406e-03 -5.0019e-02
(61,1 ,.,.) =
-6.1292e-02 -1.4004e-02 1.7229e-02 ... 1.8349e-02 -3.2708e-02 -4.1060e-02
-3.1506e-02 2.4460e-02 4.5516e-02 ... 6.6806e-02 4.6687e-02 3.3248e-02
-3.2216e-02 2.0718e-02 2.3343e-02 ... 3.5265e-02 3.6478e-02 3.1291e-02
... ⋱ ...
1.7739e-02 6.1040e-02 4.8247e-02 ... 3.7785e-02 2.8894e-02 1.3984e-02
-1.0890e-02 2.2079e-02 4.2737e-02 ... 6.0247e-02 1.6197e-02 -1.2493e-02
-2.2284e-02 1.3220e-02 3.0897e-02 ... 1.0403e-01 4.0119e-02 -5.3310e-03
(61,2 ,.,.) =
-8.5322e-02 -4.2603e-02 6.8145e-03 ... 3.0751e-02 -3.4818e-02 -4.9945e-02
-2.9215e-02 1.8165e-02 5.1092e-02 ... 9.0200e-02 5.3438e-02 4.0169e-02
-3.9932e-02 -1.1100e-03 9.6176e-03 ... 2.4114e-02 2.6298e-02 2.5489e-02
... ⋱ ...
-3.1890e-03 3.0454e-02 1.6316e-02 ... 5.5054e-03 -6.2689e-03 -8.4638e-03
-2.2995e-02 -2.8211e-03 2.3203e-02 ... 3.5888e-02 -1.4296e-02 -3.2419e-02
-9.8894e-03 7.0542e-03 1.0659e-02 ... 7.0495e-02 1.2996e-02 -8.3417e-03
⋮
(62,0 ,.,.) =
-7.8699e-03 1.9911e-02 3.4208e-02 ... 2.8694e-02 1.2820e-02 1.8142e-02
8.7942e-03 -3.2875e-02 -3.5713e-02 ... 7.2533e-02 4.5889e-02 5.2383e-02
-3.6122e-02 -1.1878e-01 -1.3767e-01 ... 3.3811e-02 3.7806e-02 2.6944e-02
... ⋱ ...
1.7322e-02 3.9589e-03 -8.2269e-03 ... 2.7543e-03 1.8313e-02 1.6057e-02
-9.5007e-04 1.6428e-02 1.7156e-02 ... 3.3672e-03 2.2857e-02 6.5783e-04
6.1727e-03 2.7145e-02 1.4340e-02 ... 7.5867e-03 1.8770e-02 1.5624e-02
(62,1 ,.,.) =
-1.3423e-02 -5.0696e-04 8.0959e-03 ... -6.0963e-03 9.2341e-03 1.5751e-02
-1.8343e-02 -6.7982e-02 -7.0685e-02 ... 2.9855e-02 2.6264e-02 2.3773e-02
-5.4359e-02 -1.4663e-01 -1.6211e-01 ... 1.1781e-02 3.2477e-02 1.1980e-02
... ⋱ ...
8.3686e-04 -1.7564e-02 -1.9535e-02 ... -4.1382e-03 2.4658e-02 1.2893e-02
-6.3183e-04 1.1788e-02 2.4810e-02 ... 6.1105e-03 3.9210e-02 9.6696e-03
-7.1831e-03 6.6918e-03 5.2723e-03 ... -7.6077e-03 2.7253e-02 1.7735e-02
(62,2 ,.,.) =
-2.3753e-04 -4.9343e-03 2.2991e-03 ... -4.7958e-02 -2.6154e-02 -2.3525e-02
-3.3053e-04 -5.1502e-02 -5.9977e-02 ... -1.7369e-02 -2.3337e-02 -3.7312e-02
-2.2674e-02 -9.9412e-02 -1.1176e-01 ... -1.1725e-02 -8.3744e-03 -4.0615e-02
... ⋱ ...
1.1437e-02 -8.0313e-03 -1.4955e-03 ... -3.4133e-02 -8.7267e-03 -2.3526e-02
2.9522e-03 6.7770e-04 1.9933e-02 ... -2.2002e-02 1.4814e-02 -1.4487e-02
-1.9085e-02 -2.9430e-02 -2.3284e-02 ... -4.8587e-02 -1.3049e-02 -2.4368e-02
⋮
(63,0 ,.,.) =
-3.6296e-02 7.1996e-03 1.9100e-02 ... 1.9602e-02 1.4870e-02 -1.7298e-02
-1.1061e-02 8.5665e-02 1.2667e-01 ... 1.3744e-02 -5.5036e-05 -3.0162e-02
1.1322e-01 1.8634e-01 5.0658e-02 ... -1.7333e-01 -7.2041e-02 -6.2474e-02
... ⋱ ...
-5.3062e-02 -2.5781e-01 -2.6747e-01 ... 2.6781e-01 1.4344e-01 5.5145e-02
-2.1009e-02 -2.9969e-02 1.0245e-01 ... 2.0843e-01 -4.1518e-03 -3.8118e-02
-2.2155e-02 1.2380e-02 8.4302e-02 ... -4.4992e-02 -1.4687e-01 -9.0890e-02
(63,1 ,.,.) =
-5.3969e-03 3.2799e-02 1.5486e-02 ... -7.7451e-03 3.0229e-03 1.1216e-03
6.1723e-02 1.4899e-01 1.4645e-01 ... -2.8897e-02 -2.0227e-02 -9.1878e-03
1.6146e-01 2.0886e-01 -2.5589e-02 ... -2.7278e-01 -1.0735e-01 -6.2971e-02
... ⋱ ...
-1.3723e-01 -4.0863e-01 -3.8551e-01 ... 4.0846e-01 2.6202e-01 1.3491e-01
-5.9388e-02 -6.1187e-02 1.4197e-01 ... 3.5780e-01 9.0893e-02 -1.7392e-03
7.8613e-03 5.8403e-02 1.5339e-01 ... 4.7045e-02 -1.0095e-01 -9.7920e-02
(63,2 ,.,.) =
-5.6799e-03 1.3425e-02 -2.6461e-02 ... 4.4881e-03 2.0666e-03 1.3902e-02
6.5943e-03 4.5181e-02 6.0260e-02 ... 1.4368e-02 -5.0725e-03 4.0505e-03
5.5257e-02 1.2397e-01 4.3193e-02 ... -1.4486e-01 -7.4489e-02 -5.7533e-02
... ⋱ ...
-3.1513e-02 -1.6334e-01 -1.5795e-01 ... 2.2904e-01 1.2017e-01 7.1998e-02
-1.0456e-02 -1.1248e-03 8.4582e-02 ... 1.5748e-01 2.2142e-02 -1.0083e-02
-4.8639e-03 -5.0065e-03 3.6341e-02 ... -2.4361e-02 -7.1195e-02 -6.6788e-02
[torch.FloatTensor of size 64x3x7x7]
), ('bn1.weight',
2.3487e-01
2.6626e-01
-5.1096e-08
5.1870e-01
3.4404e-09
2.2239e-01
4.2289e-01
1.3153e-07
2.5093e-01
1.5152e-06
3.1687e-01
2.5049e-01
3.7893e-01
1.0862e-05
2.7526e-01
2.3674e-01
2.4202e-01
3.9531e-01
4.6935e-01
2.9090e-01
2.7268e-01
2.7803e-01
2.9069e-01
2.0693e-01
2.5899e-01
2.7871e-01
2.9115e-01
3.1601e-01
3.8889e-01
3.0411e-01
2.6776e-01
2.1093e-01
2.8708e-01
3.3243e-01
4.2673e-01
3.7326e-01
7.4804e-08
1.9068e-01
1.4740e-08
2.2303e-01
1.7908e-01
2.4860e-01
2.7400e-01
2.5923e-01
2.9420e-01
2.9924e-01
2.2369e-01
2.6280e-01
2.2001e-08
2.6610e-01
2.2089e-01
2.8429e-01
3.3072e-01
2.2681e-01
3.6538e-01
2.1230e-01
2.3965e-01
2.4950e-01
5.2583e-01
2.4825e-01
2.9565e-01
2.5878e-01
4.8326e-01
2.6670e-01
[torch.FloatTensor of size 64]
), ('bn1.bias',
2.3072e-01
2.5382e-01
-1.0543e-06
-6.6439e-01
-1.6571e-08
1.6152e-01
4.5450e-01
-4.3020e-07
3.0051e-01
-8.0052e-06
3.4942e-01
3.1148e-01
-2.4953e-01
-3.4749e-05
1.0773e-01
2.1897e-01
3.8141e-01
-5.2988e-01
-6.2864e-01
5.7140e-01
2.9985e-01
5.8430e-01
4.8202e-01
3.2853e-01
1.9672e-01
1.9496e-01
1.5215e-01
8.5522e-02
5.1314e-01
1.5237e-02
1.6644e-01
3.3239e-01
2.4921e-01
4.4337e-01
-2.8017e-01
-2.0385e-02
-2.4507e-07
3.2134e-01
-4.9152e-08
2.3777e-01
2.3291e-01
3.1527e-01
4.2776e-01
2.9313e-01
2.6379e-01
6.7598e-01
4.2910e-01
3.4566e-01
-8.6909e-08
2.4729e-01
3.0316e-01
6.1577e-01
3.9835e-01
3.3207e-01
-4.1219e-01
3.7807e-01
1.7895e-01
2.5748e-01
-4.4908e-01
2.1306e-01
5.6934e-01
5.7274e-01
-4.0238e-01
2.3406e-01
[torch.FloatTensor of size 64]
), ('bn1.running_mean',
2.7681e-03
-2.5769e-02
2.1254e-07
-8.4605e-02
2.1121e-08
4.9691e-04
-2.2408e-02
-1.1582e-07
-4.8239e-03
2.7507e-07
3.9582e-02
3.1994e-02
-3.7490e-02
-1.3716e-06
6.6002e-03
4.3782e-03
6.4797e-02
1.1176e-01
3.6002e-02
-7.5075e-02
-3.8240e-02
8.4358e-02
-5.2287e-02
-1.1799e-02
1.3019e-03
3.2172e-02
-1.7784e-02
-9.1009e-02
1.1319e-01
-4.1632e-02
8.7302e-03
2.9693e-02
-7.0502e-02
-3.4847e-03
1.0977e-01
-1.7341e-03
-5.9423e-08
2.9330e-02
-7.8553e-09
6.7320e-03
-3.7100e-03
1.6028e-02
-2.7883e-02
2.6593e-02
2.8475e-02
-1.2735e-01
4.4617e-02
2.6329e-02
2.1454e-08
-1.7045e-02
-3.5617e-03
-4.5841e-02
6.3876e-02
1.5220e-02
-3.8511e-02
-1.6428e-02
-1.6569e-02
5.6057e-02
-8.0306e-02
-2.6646e-03
-4.1718e-02
1.2611e-01
-4.9237e-02
-1.3261e-02
[torch.FloatTensor of size 64]
), ('bn1.running_var',
1.0169e+00
3.7167e+00
5.8133e-11
3.2825e+00
1.7107e-13
6.5823e-01
4.3701e+00
6.6005e-12
9.1552e-01
1.9318e-09
4.1256e+00
2.7440e+00
2.8391e+00
4.7966e-08
1.1072e+01
5.0075e-01
2.2313e+00
4.8257e+00
2.6986e+00
9.3700e+00
3.7339e+00
5.4843e+00
5.7127e+00
4.4544e-01
4.3628e-01
7.1563e+00
1.3718e+01
5.2512e+00
6.8174e+00
1.6724e+00
1.6534e+00
1.2325e+00
4.9076e+00
3.0731e+00
4.2384e+00
4.9936e+00
1.4465e-12
1.5212e+00
1.0352e-13
3.5134e-01
1.7025e-01
1.4205e+00
1.9085e+00
2.1512e+00
2.6608e+00
4.8444e+00
1.9297e+00
1.4999e+00
2.9481e-13
1.5306e+00
3.6503e-01
2.9376e+00
5.4664e+00
7.0792e-01
3.3315e+00
7.7180e-01
2.4068e+00
6.5214e+00
4.1263e+00
1.0506e+00
2.9530e+00
1.1366e+01
4.7690e+00
1.6559e+00
[torch.FloatTensor of size 64]
), ('layer1.0.conv1.weight',
(0 ,0 ,.,.) =
5.7593e-02 -9.5114e-02 -2.0272e-02
-7.4556e-02 -7.9931e-01 -2.1284e-01
6.5571e-02 -9.6534e-02 -1.2111e-02
(0 ,1 ,.,.) =
-6.9944e-03 1.4266e-02 5.5824e-04
4.1238e-02 -1.6125e-01 -2.3208e-02
3.2887e-03 7.1779e-03 7.1686e-02
(0 ,2 ,.,.) =
-2.3627e-09 -3.9270e-08 -3.2971e-08
2.1737e-08 8.3299e-09 1.2543e-08
1.1382e-08 8.8096e-09 1.5506e-08
...
(0 ,61,.,.) =
-3.6921e-02 1.8294e-02 -2.9358e-02
-9.8615e-02 -4.3645e-02 -5.2717e-02
-7.9635e-02 2.9396e-02 4.1479e-03
(0 ,62,.,.) =
1.6948e-02 1.3978e-02 9.6727e-03
1.4297e-02 -6.6985e-04 -2.2077e-02
1.2398e-02 3.5454e-02 -2.2320e-02
(0 ,63,.,.) =
-2.2600e-02 -2.5331e-02 -2.3548e-02
6.0860e-02 -9.6779e-02 2.4057e-02
-1.2750e-02 9.2237e-02 4.0152e-03
⋮
(1 ,0 ,.,.) =
2.2160e-02 4.2177e-02 -1.6428e-02
-2.9667e-02 5.6865e-02 2.5486e-02
4.3847e-03 5.1188e-02 1.0436e-02
(1 ,1 ,.,.) =
2.5342e-02 5.4374e-02 5.3888e-02
-2.8334e-02 -2.0139e-01 -5.6358e-02
5.6774e-02 7.4188e-02 2.1585e-02
(1 ,2 ,.,.) =
-3.1458e-08 3.5335e-08 5.3791e-08
-2.6896e-08 5.1530e-08 5.4480e-08
-3.8487e-08 -1.1234e-08 -7.5787e-09
...
(1 ,61,.,.) =
-1.2754e-01 4.3552e-02 -6.5607e-02
-6.0462e-02 1.5989e-01 -7.7070e-03
-9.4202e-02 5.0750e-02 -7.8154e-02
(1 ,62,.,.) =
-3.3309e-02 1.6631e-03 -8.8497e-03
1.5553e-02 -5.8277e-02 -2.7437e-02
1.3126e-02 -3.0268e-02 -2.1661e-03
(1 ,63,.,.) =
-4.2313e-03 3.4517e-02 3.8193e-03
5.4317e-02 -1.2457e-02 3.2900e-02
2.2000e-04 1.6040e-02 1.2764e-01
⋮
(2 ,0 ,.,.) =
-3.5247e-02 8.0748e-03 2.0353e-02
1.7344e-02 -2.4320e-02 -1.5511e-04
-2.7634e-04 2.8024e-02 -2.3777e-03
(2 ,1 ,.,.) =
-2.3741e-02 -3.2057e-03 -5.7059e-03
-1.1582e-02 1.7200e-03 2.1067e-02
4.3606e-03 -4.6459e-02 -7.2954e-02
(2 ,2 ,.,.) =
3.1002e-08 5.3568e-08 3.1873e-08
-1.6063e-08 -1.8072e-08 -1.9508e-09
-5.8339e-08 -4.5366e-08 -1.2395e-08
...
(2 ,61,.,.) =
-1.9689e-03 -2.6809e-02 -4.3760e-02
2.4518e-02 -2.8396e-02 -3.5896e-02
-1.7883e-04 -2.4661e-02 -2.0085e-02
(2 ,62,.,.) =
2.1551e-02 2.2789e-03 -2.5823e-02
2.3272e-02 -7.9333e-03 -2.0814e-03
-5.7062e-03 -2.6934e-02 -1.4421e-02
(2 ,63,.,.) =
-1.9674e-02 2.7914e-02 -2.0025e-02
6.3222e-02 -3.9077e-02 -3.3220e-03
-2.7434e-02 1.1390e-02 -3.1608e-03
...
⋮
(61,0 ,.,.) =
4.3440e-03 -7.6970e-03 -6.4950e-02
1.3846e-02 -2.2803e-02 -4.6478e-02
2.7776e-02 1.6080e-02 -1.3363e-02
(61,1 ,.,.) =
4.7379e-02 -2.4982e-02 -2.7605e-02
7.0091e-02 4.2084e-03 -1.0805e-01
1.7526e-02 4.5647e-02 7.8810e-03
(61,2 ,.,.) =
2.6680e-09 2.7671e-08 2.4702e-08
6.3905e-09 4.1020e-08 3.3631e-08
5.8335e-09 1.3334e-08 9.6604e-09
...
(61,61,.,.) =
4.5900e-03 4.7084e-02 -8.6949e-03
-6.3011e-03 5.9585e-02 5.8667e-03
-2.0255e-02 4.3285e-02 4.5094e-03
(61,62,.,.) =
1.1253e-03 -5.7461e-03 -6.8411e-03
6.0616e-03 7.3295e-03 -1.1784e-02
-1.1455e-03 5.1868e-03 -1.9867e-02
(61,63,.,.) =
1.7529e-02 4.4606e-02 -2.6595e-02
2.2102e-02 4.5857e-02 2.3347e-02
1.8052e-02 5.9689e-02 1.7129e-02
⋮
(62,0 ,.,.) =
-2.9112e-02 3.4242e-03 -1.7523e-02
-2.3682e-02 2.2716e-02 -3.8301e-02
-1.0308e-02 -4.3802e-03 -2.3582e-02
(62,1 ,.,.) =
-4.9607e-02 -3.2724e-03 -1.5345e-02
-1.3524e-02 5.4842e-02 1.1187e-02
-2.3549e-02 -2.8495e-02 -6.6371e-02
(62,2 ,.,.) =
-4.9804e-08 -2.8211e-08 -2.0583e-08
-5.2389e-08 -2.8522e-08 -3.5099e-08
-3.2171e-08 -3.4110e-08 -4.3153e-08
...
(62,61,.,.) =
3.4487e-03 2.6532e-02 -1.1202e-02
7.0925e-03 3.7903e-02 -3.2481e-02
4.1381e-02 3.2329e-02 2.8309e-03
(62,62,.,.) =
-6.5955e-03 1.6476e-02 2.1810e-02
-1.2293e-02 2.2310e-02 1.2645e-02
-8.9897e-03 1.1948e-03 -5.2390e-03
(62,63,.,.) =
-2.5295e-03 7.2689e-02 -7.8046e-03
-4.2221e-02 7.9756e-02 -2.7738e-02
4.6716e-03 -5.6596e-02 -8.2261e-02
⋮
(63,0 ,.,.) =
5.2235e-02 3.5231e-03 -3.3131e-02
3.1048e-02 1.6193e-02 1.7283e-02
1.4446e-02 2.4302e-02 -1.9689e-03
(63,1 ,.,.) =
-2.4717e-02 8.3009e-03 -6.1336e-02
-1.6134e-02 5.5323e-02 -6.5029e-02
-2.4715e-02 1.0030e-03 3.2437e-02
(63,2 ,.,.) =
1.8496e-08 5.2798e-09 4.1820e-08
3.7489e-08 2.5450e-08 3.0419e-08
1.1246e-08 -5.6956e-09 -2.0008e-08
...
(63,61,.,.) =
7.1194e-03 -4.1052e-02 -1.0002e-02
2.5924e-02 -6.3819e-02 1.3366e-02
2.9751e-02 -7.9476e-03 1.4007e-02
(63,62,.,.) =
-2.5166e-03 2.2051e-02 -1.9967e-02
-5.9436e-02 4.3872e-02 2.6832e-02
-1.7509e-02 2.4625e-02 2.4822e-02
(63,63,.,.) =
3.5832e-02 -7.0357e-02 3.9452e-03
-2.9835e-02 9.2727e-02 1.9336e-02
-2.9145e-02 -9.7087e-03 -7.3388e-02
[torch.FloatTensor of size 64x64x3x3]
), ('layer1.0.bn1.weight',
0.3090
0.2147
0.2366
0.4259
0.5137
0.2181
0.2204
0.2300
0.2640
0.2695
0.2138
0.4602
0.2661
0.2319
0.3900
0.2389
0.2660
0.3634
0.3474
0.2477
0.3285
0.5349
0.6440
0.2275
0.4482
0.3078
0.2604
0.4651
0.2179
0.2858
0.3426
0.4420
0.4450
0.4500
0.5516
0.5092
0.2564
0.2634
0.5664
0.6410
0.2228
0.1986
0.2460
0.2242
0.2143
0.1982
0.6368
0.3106
0.5049
0.2403
0.3065
0.3760
0.3794
0.4281
0.2991
0.3326
0.2596
0.3345
0.2006
0.4351
0.1683
0.5149
0.2629
0.3254
[torch.FloatTensor of size 64]
), ('layer1.0.bn1.bias',
0.1657
0.2420
0.1780
-0.0431
-0.2053
0.1598
0.2929
0.0912
0.1116
0.0884
0.1104
-0.2035
0.1539
0.0857
-0.1094
0.0654
0.0766
-0.2067
-0.0212
0.1396
0.0401
-0.2827
-0.3257
-0.0035
-0.4373
-0.1248
0.1282
-0.0874
0.1199
-0.0829
-0.5315
-0.0780
-0.3876
-0.0547
-0.1816
-0.1888
0.1320
0.0031
-0.2697
-0.2984
0.1394
0.2597
0.1372
0.0053
0.0132
0.3295
-0.2715
-0.0187
-0.2467
0.1579
0.0165
-0.0890
-0.1903
-0.0787
0.1700
-0.4832
0.0619
-0.0677
0.3125
-0.5064
0.3138
-0.2617
-0.1545
0.0063
[torch.FloatTensor of size 64]
), ('layer1.0.bn1.running_mean',
-0.4332
-0.1757
0.0307
-0.7058
-1.6364
-0.7989
-0.0678
-0.1956
-1.1260
-0.9578
0.0030
-1.8265
-0.0393
-0.8680
-1.1062
-0.6359
-0.9872
-0.5778
-1.3349
-0.3408
-1.1982
-1.6058
-2.1702
-0.8814
-0.8175
-0.6951
0.6542
-1.6422
0.2811
0.3163
-0.4123
-1.4023
-1.5044
-2.5031
-2.1580
-1.3645
-0.8579
-0.2206
-2.5548
-2.2695
-0.1609
-0.8552
0.5289
1.3492
-0.9382
-0.3356
-2.9168
-1.5967
-1.8875
-1.6166
-1.9443
-2.0195
-0.9671
-1.3881
-1.8836
0.1869
-1.3487
-0.4593
-0.4542
-0.9032
-0.0768
-1.7719
1.2484
-0.9139
[torch.FloatTensor of size 64]
), ('layer1.0.bn1.running_var',
0.4351
0.2044
0.2344
0.5559
0.9626
0.3484
0.0871
0.6851
0.4714
1.2642
0.1519
0.6730
0.2430
0.5577
0.8701
0.2419
0.2052
0.8149
0.3040
0.2617
0.8060
0.8007
1.5581
0.2404
0.4445
0.6765
0.5562
0.9378
0.2584
0.3173
0.0962
0.4118
0.5197
0.9767
1.2703
0.8908
0.3609
0.2227
1.1588
1.5965
0.4060
0.2559
0.1763
0.2797
0.3757
0.1282
1.8280
0.3145
0.7419
0.2129
0.8122
0.4660
0.4065
0.4914
0.4814
0.1697
0.4000
0.3867
0.1499
0.4137
0.0671
0.8303
0.2434
0.3449
[torch.FloatTensor of size 64]
), ('layer1.0.conv2.weight',
(0 ,0 ,.,.) =
2.5947e-02 -1.0458e-01 -4.7712e-03
-8.6223e-02 -3.3021e-01 -1.0275e-01
-5.7426e-02 -1.9074e-01 -5.4646e-02
(0 ,1 ,.,.) =
-1.6951e-02 2.1384e-02 -2.1074e-03
-3.2983e-03 4.5014e-02 -1.1510e-02
-5.9602e-02 6.4942e-03 2.9080e-03
(0 ,2 ,.,.) =
-4.4903e-03 1.9637e-02 1.3167e-02
1.3050e-02 -7.7471e-03 1.1931e-02
1.3454e-02 1.1103e-02 5.5145e-03
...
(0 ,61,.,.) =
1.2706e-03 -7.7438e-03 2.0753e-02
-4.0024e-02 -4.0383e-02 -3.4821e-02
-2.0251e-02 -9.5164e-03 1.3954e-02
(0 ,62,.,.) =
-2.3430e-03 3.2303e-02 -4.3342e-03
8.6194e-03 1.0553e-02 1.8074e-03
-1.2760e-02 -1.0232e-02 4.5711e-03
(0 ,63,.,.) =
1.5302e-02 2.1361e-02 -7.0908e-03
-1.4221e-02 4.5979e-02 2.1369e-02
3.1312e-02 6.6428e-02 2.1465e-02
⋮
(1 ,0 ,.,.) =
5.3422e-02 4.0515e-02 9.6680e-03
3.2884e-02 -2.3474e-02 3.4642e-02
-1.2861e-02 5.0066e-02 5.4579e-02
(1 ,1 ,.,.) =
2.8764e-02 4.3431e-02 2.8258e-02
2.8734e-02 -3.5459e-02 -5.2788e-02
-5.5119e-02 -7.1813e-02 -8.2970e-02
(1 ,2 ,.,.) =
9.5293e-02 1.2549e-01 -6.4001e-02
-4.1166e-02 -9.0480e-04 5.1387e-02
-1.1311e-01 -7.9823e-02 1.4373e-01
...
(1 ,61,.,.) =
-7.6924e-03 2.0647e-02 1.9521e-02
-6.7352e-03 1.2601e-04 4.8309e-03
-6.2405e-03 -9.2119e-03 -2.5806e-04
(1 ,62,.,.) =
-2.6153e-02 -2.4641e-02 4.0970e-02
-1.9164e-02 -1.0160e-02 3.3163e-02
5.4200e-03 9.0485e-04 6.7799e-04
(1 ,63,.,.) =
7.7762e-03 2.6447e-02 6.3650e-02
-3.0608e-02 2.4959e-02 1.2951e-02
-2.0938e-02 -7.7342e-03 -3.8790e-02
⋮
(2 ,0 ,.,.) =
1.0893e-02 -1.4409e-02 1.5730e-02
1.6655e-02 4.4535e-02 6.3212e-02
3.4121e-02 7.3135e-02 5.9203e-02
(2 ,1 ,.,.) =
2.3195e-03 7.7598e-03 2.0308e-02
2.0457e-02 4.0029e-02 3.4744e-02
-4.7356e-02 -3.7286e-02 1.4542e-02
(2 ,2 ,.,.) =
-2.2742e-02 -1.9000e-02 -8.4317e-03
-9.8759e-04 2.1510e-02 6.3959e-03
-9.4558e-03 2.6833e-03 -3.1136e-02
...
(2 ,61,.,.) =
-7.5787e-03 -1.6056e-02 -6.4204e-04
-5.5104e-03 1.4252e-02 4.5000e-02
-9.2800e-03 2.2351e-02 4.1728e-02
(2 ,62,.,.) =
2.5705e-02 4.8207e-02 7.9145e-02
-4.4350e-03 3.8872e-03 4.1694e-02
8.0536e-04 -1.0601e-02 9.2706e-03
(2 ,63,.,.) =
-3.3892e-02 9.3543e-03 4.1746e-02
-1.6470e-02 3.9542e-03 6.2438e-02
-3.1055e-02 -3.6302e-03 7.0817e-02
...
⋮
(61,0 ,.,.) =
-7.1044e-05 -9.0020e-03 -2.6998e-03
3.0072e-03 1.1579e-02 1.5214e-02
3.4832e-03 1.1353e-05 1.6320e-02
(61,1 ,.,.) =
-2.6334e-02 2.1967e-02 -6.0039e-02
4.4519e-02 1.3203e-01 -9.1163e-03
5.4242e-02 1.3726e-01 2.7454e-02
(61,2 ,.,.) =
1.7122e-02 3.7646e-03 1.4872e-02
1.2092e-02 1.1319e-02 3.4667e-02
8.1790e-03 -2.0805e-02 2.7143e-02
...
(61,61,.,.) =
-1.0111e-02 -1.0526e-02 2.8394e-02
-2.5112e-02 -2.2196e-02 3.7229e-02
-3.8220e-02 -4.6644e-02 1.5660e-02
(61,62,.,.) =
-2.5913e-03 -2.4307e-02 1.0611e-02
-2.1730e-02 -4.3938e-02 -7.1536e-03
-2.5171e-02 -5.9467e-02 -2.5577e-02
(61,63,.,.) =
2.8652e-02 2.5850e-04 1.1416e-03
3.7812e-02 -1.1271e-03 9.6027e-03
3.9350e-02 1.0134e-02 1.0449e-02
⋮
(62,0 ,.,.) =
-7.9305e-03 7.0872e-03 2.1412e-02
-6.0065e-02 1.4147e-03 9.7281e-02
-6.0130e-02 -2.1725e-02 3.6863e-02
(62,1 ,.,.) =
2.8024e-02 2.6183e-02 -2.3027e-02
5.1900e-02 -2.0588e-03 -1.0940e-01
-3.2729e-02 -6.2752e-03 8.0630e-03
(62,2 ,.,.) =
-1.8062e-02 -1.9510e-02 4.3163e-02
4.6080e-02 2.9494e-02 4.0844e-02
5.9607e-03 -6.5891e-03 -6.4623e-03
...
(62,61,.,.) =
2.2193e-02 8.4653e-03 3.6764e-03
1.7549e-02 2.1971e-02 -4.5108e-03
2.1124e-02 3.4591e-02 -1.6310e-02
(62,62,.,.) =
3.8144e-02 4.8395e-02 -9.5556e-02
1.8923e-02 1.1341e-02 -7.6311e-02
4.7358e-03 3.2138e-02 -7.4777e-02
(62,63,.,.) =
-1.9031e-02 -3.2568e-02 -3.8251e-02
1.0705e-02 2.3121e-03 -7.5078e-02
3.3316e-02 3.5515e-02 -2.1023e-03
⋮
(63,0 ,.,.) =
-1.3330e-01 7.4683e-02 -3.8624e-03
9.1377e-02 8.2415e-02 3.9469e-02
-1.8265e-02 -5.9943e-02 8.9354e-02
(63,1 ,.,.) =
1.5566e-02 -4.1716e-02 1.0633e-02
7.2644e-03 3.1934e-02 1.2732e-03
-2.0851e-02 -3.7593e-03 -7.0170e-02
(63,2 ,.,.) =
-6.6139e-02 1.0627e-01 1.9590e-02
5.4987e-02 -1.5552e-01 -1.8819e-02
-4.2554e-03 4.4964e-02 -2.4632e-02
...
(63,61,.,.) =
-6.1691e-02 -4.5531e-02 -9.1721e-03
4.3995e-02 4.5703e-02 -7.0108e-02
1.1388e-02 4.4678e-02 -4.5953e-02
(63,62,.,.) =
4.3432e-03 2.3194e-02 -2.1895e-02
-8.0216e-02 -5.7606e-02 -9.8455e-03
-3.3285e-02 -1.1468e-01 -2.3779e-02
(63,63,.,.) =
-6.3785e-02 -2.4485e-02 -4.9061e-02
-6.1594e-02 1.0328e-01 5.9685e-03
8.1863e-02 -3.0314e-02 -4.6373e-03
[torch.FloatTensor of size 64x64x3x3]
), ('layer1.0.bn2.weight',
0.2496
0.2198
0.2756
0.6073
0.2654
0.2942
0.1136
0.4425
0.2868
0.2974
0.2506
0.4103
0.4855
0.3383
0.4670
0.1772
0.2171
0.5025
0.2263
0.3667
0.4867
0.4586
0.4652
0.2200
0.1510
0.2761
0.3813
0.2803
0.2382
0.3953
0.3032
0.3163
0.2025
0.2323
0.2003
0.1661
0.4690
0.3476
0.3414
0.2274
0.2485
0.2356
0.2726
0.4657
0.3429
0.2465
0.4674
0.2812
0.6241
0.4152
0.3403
0.4218
0.1152
0.2985
0.5802
0.2795
0.4706
0.4517
0.4303
0.2749
0.3427
0.1137
0.5069
0.4370
[torch.FloatTensor of size 64]
), ('layer1.0.bn2.bias',
0.2275
0.0087
-0.0673
-0.0688
0.3598
-0.2017
-0.0000
0.0237
0.3955
0.0371
0.0069
0.2758
-0.0703
-0.2397
-0.0818
-0.0941
-0.1454
0.0373
-0.3617
-0.3956
-0.4079
0.0036
-0.2788
-0.0353
-0.0703
0.2101
-0.0046
-0.1966
-0.2807
-0.0165
0.2645
-0.0894
-0.2105
-0.1303
0.1721
0.0534
-0.2230
-0.0480
0.2457
0.2095
0.1622
0.1137
0.1146
-0.1487
-0.0322
-0.3055
0.4912
0.1087
0.0128
0.1004
0.4155
-0.0147
0.0239
0.0998
-0.1727
0.1008
-0.1456
-0.2274
0.1364
0.2013
-0.0574
0.2353
-0.1130
0.3093
[torch.FloatTensor of size 64]
), ('layer1.0.bn2.running_mean',
0.1677
0.0052
0.7868
0.1670
0.4774
-0.0362
-0.0578
0.0020
-0.3420
0.0674
-0.0970
-0.1000
-0.0659
-0.0385
-0.0709
-0.2218
-0.1343
0.1198
-0.0602
0.0006
0.0864
-0.1466
0.0354
-0.1539
-0.0416
-0.4317
0.0330
-0.0797
-0.5868
-0.2461
-0.1505
-0.4012
-0.2187
-0.6351
0.0698
0.1412
0.2754
-0.5645
-0.1113
-0.2414
-0.1313
-0.0215
0.1264
-0.1684
0.1693
-0.4684
-0.6908
0.2044
0.0015
-0.0543
-0.1487
0.1262
-0.1145
-0.1597
0.1230
-0.1605
-0.2256
-0.0995
-0.1621
0.3230
-0.0055
0.1091
0.0781
0.0121
[torch.FloatTensor of size 64]
), ('layer1.0.bn2.running_var',
0.1130
0.0412
0.0335
0.1282
0.2084
0.0307
0.0606
0.0737
0.0313
0.0409
0.1376
0.0399
0.0437
0.0282
0.1588
0.0288
0.0837
0.0799
0.0177
0.1839
0.0884
0.3054
0.1512
0.0394
0.0374
0.0969
0.1719
0.0610
0.0607
0.1560
0.0448
0.1236
0.0464
0.1005
0.0498
0.0481
0.0450
0.1229
0.0623
0.0381
0.0229
0.1227
0.1656
0.1047
0.1316
0.1834
0.0622
0.1272
0.1929
0.0419
0.0263
0.2623
0.0712
0.1442
0.0937
0.0983
0.1163
0.1511
0.1009
0.0342
0.1854
0.0698
0.0631
0.0350
[torch.FloatTensor of size 64]
), ('layer1.1.conv1.weight',
(0 ,0 ,.,.) =
1.9712e-02 -5.2562e-03 -3.7619e-03
-1.9635e-02 -1.2336e-02 -3.5196e-02
5.0761e-02 7.5668e-02 4.3344e-02
(0 ,1 ,.,.) =
1.4160e-02 -8.6094e-03 -1.0541e-02
-4.2586e-02 -2.3814e-02 -5.4694e-02
-1.4018e-03 4.6720e-02 5.0898e-02
(0 ,2 ,.,.) =
2.1559e-02 4.1633e-03 -9.7118e-03
-9.3201e-03 -2.5432e-02 -2.8274e-02
-3.0107e-02 -4.8230e-02 -2.6001e-02
...
(0 ,61,.,.) =
5.4300e-03 9.1875e-02 3.1938e-03
-1.7945e-02 5.7266e-02 -8.4098e-03
-3.4961e-02 -2.3296e-02 -3.5089e-02
(0 ,62,.,.) =
2.5603e-02 -3.1689e-02 -5.4160e-02
6.9736e-02 -1.0716e-02 -6.8034e-02
3.5578e-02 3.4749e-02 -1.9334e-02
(0 ,63,.,.) =
-6.5420e-02 -4.6427e-03 -2.3362e-02
7.5833e-02 9.1174e-03 -4.9701e-02
6.2944e-02 -9.8735e-02 3.3158e-02
⋮
(1 ,0 ,.,.) =
-9.0557e-03 -3.0753e-02 1.1953e-02
-3.2539e-02 -6.2846e-03 -2.0235e-02
4.7996e-03 -2.1462e-02 -4.1557e-03
(1 ,1 ,.,.) =
1.7163e-02 -2.3303e-03 7.3972e-02
-3.2105e-02 -7.7536e-02 -1.2648e-02
3.8985e-02 -4.3170e-02 1.0904e-02
(1 ,2 ,.,.) =
-2.9643e-02 -5.8534e-02 -5.9736e-02
-2.9437e-02 -3.6441e-02 -1.2380e-02
-2.2775e-02 -2.4485e-03 -1.6124e-03
...
(1 ,61,.,.) =
2.6830e-02 1.4267e-02 6.2658e-02
3.0585e-04 -5.3241e-03 3.2786e-03
2.1097e-02 -2.3189e-02 1.2102e-02
(1 ,62,.,.) =
-6.1182e-02 -2.9227e-02 2.0036e-02
-7.6089e-02 -7.7057e-02 8.6544e-02
-3.9228e-02 -3.2361e-02 -8.8970e-02
(1 ,63,.,.) =
-1.3372e-01 8.8362e-02 8.3836e-02
-1.1688e-02 4.3156e-01 -3.3629e-03
-2.3925e-02 -1.0092e-01 -1.0184e-01
⋮
(2 ,0 ,.,.) =
8.0165e-02 4.3042e-02 2.7325e-03
3.5269e-02 -1.5504e-02 -3.5011e-02
-1.7164e-02 -2.6827e-02 -3.3946e-02
(2 ,1 ,.,.) =
4.5439e-02 5.1585e-02 1.8321e-02
-3.9647e-02 2.3956e-02 -2.6609e-02
-3.0358e-02 -6.4729e-02 2.5834e-02
(2 ,2 ,.,.) =
3.8105e-02 4.0986e-02 4.1005e-02
1.7584e-02 -1.6494e-02 -3.2716e-02
5.5886e-03 -1.7068e-02 -3.0605e-02
...
(2 ,61,.,.) =
-1.3694e-01 -1.4074e-01 5.1423e-02
-1.2521e-01 -1.3128e-01 7.5733e-02
-4.5032e-02 -1.7081e-02 7.1252e-02
(2 ,62,.,.) =
6.3381e-02 1.5874e-02 -2.7322e-02
8.0356e-02 3.6104e-02 -2.8506e-02
2.6638e-02 2.2021e-02 3.2345e-02
(2 ,63,.,.) =
-1.2068e-03 -4.6179e-02 -1.5351e-02
-1.1276e-02 1.9200e-02 3.4336e-02
1.6540e-02 -7.8592e-03 -2.5392e-02
...
⋮
(61,0 ,.,.) =
3.3384e-02 6.9963e-02 1.0745e-02
-1.7518e-02 -5.3524e-02 -6.4960e-02
3.4248e-04 -4.5557e-02 -4.7336e-02
(61,1 ,.,.) =
-5.1031e-03 7.9784e-03 -8.6553e-04
-1.6557e-03 1.4661e-02 5.3365e-03
-3.1784e-02 -6.6940e-02 -4.6889e-02
(61,2 ,.,.) =
-1.1775e-02 7.2759e-03 7.6622e-03
-6.1288e-02 -5.2078e-02 -4.5152e-02
-8.6584e-02 -9.7381e-02 -1.0405e-01
...
(61,61,.,.) =
2.1243e-02 6.2456e-02 2.5188e-02
-2.2911e-02 -2.1100e-03 -2.7573e-02
4.6557e-02 6.4980e-02 3.1879e-02
(61,62,.,.) =
6.2867e-03 2.4255e-02 8.9674e-02
-7.7718e-03 -5.4311e-02 -4.6843e-02
-6.7499e-03 -6.6857e-02 -4.9842e-02
(61,63,.,.) =
4.7326e-03 -3.9533e-02 1.1500e-03
-2.7957e-02 -1.3466e-01 -6.0753e-02
-3.2010e-03 7.2213e-02 1.1009e-01
⋮
(62,0 ,.,.) =
2.3763e-02 -1.7876e-02 -7.4843e-03
1.6239e-02 5.4479e-04 -3.3735e-02
-2.2854e-02 -1.4316e-03 1.1010e-02
(62,1 ,.,.) =
5.2277e-03 -2.5941e-03 5.9594e-03
-2.9058e-03 -7.3409e-03 3.0652e-02
7.5540e-02 6.6445e-03 2.5518e-03
(62,2 ,.,.) =
-6.5970e-02 -4.1286e-02 -3.0278e-02
-3.5108e-02 -3.9099e-02 -1.6818e-02
-1.0224e-02 -8.6995e-03 -5.9939e-04
...
(62,61,.,.) =
2.1233e-02 -2.4559e-02 -7.4436e-03
-4.3734e-03 -3.2864e-02 -3.3453e-02
8.9269e-03 -1.7646e-02 3.8375e-04
(62,62,.,.) =
-7.8930e-02 -7.2940e-02 -6.7911e-02
-8.4146e-02 -8.3657e-02 5.3666e-02
-3.5577e-02 -3.6835e-02 5.8987e-03
(62,63,.,.) =
8.3767e-02 8.0476e-05 7.2164e-02
-6.4219e-02 -1.2661e-01 4.6026e-02
9.3033e-02 -4.7521e-02 3.6777e-02
⋮
(63,0 ,.,.) =
4.1012e-02 1.3361e-03 -5.8616e-02
4.2461e-02 2.9437e-03 -2.0445e-02
7.6097e-02 5.2504e-02 -5.5636e-03
(63,1 ,.,.) =
2.2046e-02 4.0888e-03 1.4645e-02
-7.7532e-02 -1.1912e-01 -7.0892e-02
-1.0618e-02 -3.2121e-02 -2.3969e-02
(63,2 ,.,.) =
-2.1612e-02 -2.6110e-03 -3.1664e-02
-3.2892e-02 -3.9771e-02 -5.1463e-02
-2.6150e-02 -3.6554e-02 -2.3315e-02
...
(63,61,.,.) =
4.4600e-03 8.4181e-02 2.3199e-02
5.7595e-02 1.3036e-01 3.2172e-02
-2.2774e-03 4.2065e-02 -4.8619e-02
(63,62,.,.) =
3.1533e-02 -4.3655e-02 2.0361e-02
3.9973e-03 -5.1430e-02 -6.3839e-02
6.4002e-03 4.5347e-02 4.7346e-02
(63,63,.,.) =
-9.1818e-02 1.0264e-02 9.6565e-02
-2.1635e-03 -2.3452e-02 -5.9038e-02
1.9402e-02 2.8854e-02 -9.6113e-02
[torch.FloatTensor of size 64x64x3x3]
), ('layer1.1.bn1.weight',
0.3910
0.4375
0.3746
0.3990
0.3404
0.3503
0.2618
0.2707
0.2865
0.4308
0.1895
0.3041
0.3837
0.2944
0.2105
0.3304
0.2943
0.2887
0.2060
0.4627
0.2335
0.1831
0.4489
0.2830
0.3389
0.2997
0.3503
0.2735
0.3908
0.2817
0.2636
0.4462
0.3282
0.3776
0.4471
0.3878
0.2516
0.3172
0.3661
0.3166
0.3818
0.3128
0.2274
0.3627
0.2902
0.2381
0.2988
0.2469
0.3840
0.2886
0.3197
0.2879
0.3218
0.4559
0.3500
0.2420
0.3396
0.3519
0.3839
0.3806
0.4039
0.2826
0.4594
0.3342
[torch.FloatTensor of size 64]
), ('layer1.1.bn1.bias',
-0.0997
-0.4755
-0.0474
-0.2698
-0.0834
-0.0072
0.0474
0.1022
-0.0170
-0.1471
0.2307
0.1447
-0.1775
0.0273
0.1559
-0.1836
0.1238
-0.1522
0.0554
-0.2881
-0.2606
0.2316
-0.3242
-0.0219
-0.2645
0.0576
-0.2465
0.0481
-0.3530
0.0950
-0.1862
-0.1707
-0.0161
-0.2604
-0.3145
-0.1083
0.0659
-0.1427
-0.0570
-0.0076
-0.3006
-0.0744
-0.0683
-0.1104
0.0253
0.0489
-0.2515
0.1150
-0.3783
0.0846
-0.0368
0.1439
-0.0468
-0.3087
-0.0240
0.1397
-0.0908
-0.1795
-0.1129
-0.0793
-0.1491
0.0594
-0.4433
-0.0138
[torch.FloatTensor of size 64]
), ('layer1.1.bn1.running_mean',
-0.6534
0.9240
-1.3403
-0.7395
-0.5830
-1.6717
-0.3376
0.1913
-0.4565
-0.7877
-0.3756
-0.2295
-1.7003
-0.6135
0.5422
-0.1072
-0.2315
-0.3775
-1.8026
-0.7210
-0.0288
-1.2585
-1.8144
0.0504
-0.0739
-1.5506
-1.5092
-1.0623
0.1706
0.1527
0.3983
-2.9065
-0.9070
-0.2983
-1.8404
-2.3956
0.2241
-0.0760
-0.9525
-1.4632
0.7657
-0.3832
0.8590
-1.3211
-1.2599
-0.1220
-0.2230
0.5071
1.0262
-0.5969
-0.0104
-1.4013
-0.4267
-0.9979
-1.9458
0.1991
-0.8841
-0.8302
-0.3076
-2.0759
-1.2645
0.2679
0.4349
-1.2568
[torch.FloatTensor of size 64]
), ('layer1.1.bn1.running_var',
0.7111
0.5543
0.6143
0.5148
0.2840
0.4924
0.3536
0.3939
0.2511
0.4859
0.1803
0.7468
0.4225
0.3686
0.1719
0.2777
0.3676
0.2311
0.3515
0.4917
0.1393
0.1732
0.6248
0.3038
0.1599
0.5246
0.2410
0.5096
0.5251
0.5369
0.1800
1.0623
0.4006
0.2060
0.5194
0.4981
0.4250
0.2616
0.8252
0.4991
0.3290
0.3642
0.2716
0.6520
0.4492
0.2753
0.3377
0.3167
0.3830
0.4624
0.4098
0.5566
0.5048
0.4747
0.6820
0.4387
0.3506
0.2995
0.5595
0.6855
0.5260
0.6478
0.4960
0.5449
[torch.FloatTensor of size 64]
), ('layer1.1.conv2.weight',
(0 ,0 ,.,.) =
-2.1574e-02 -4.5688e-03 4.5483e-03
-8.1870e-03 4.1740e-02 2.3010e-02
-8.9283e-03 5.7352e-02 2.9818e-02
(0 ,1 ,.,.) =
5.8627e-02 4.2864e-02 4.4912e-02
2.2281e-02 -1.2969e-02 7.6099e-03
4.5373e-02 3.0712e-02 3.7700e-02
(0 ,2 ,.,.) =
-1.5456e-02 -3.8692e-02 -4.6010e-02
-2.3123e-02 2.8293e-02 4.7790e-03
-2.0328e-02 1.3756e-02 2.5883e-02
...
(0 ,61,.,.) =
5.1302e-02 4.2291e-02 5.7833e-02
4.5210e-02 5.5850e-02 1.4318e-02
1.4241e-02 1.7968e-02 1.4344e-02
(0 ,62,.,.) =
4.6012e-03 1.2566e-02 4.8931e-02
-6.5754e-03 -2.6431e-02 1.5855e-02
1.3192e-02 1.9011e-02 1.3842e-02
(0 ,63,.,.) =
6.1983e-02 6.9919e-02 6.1035e-02
6.1253e-02 9.9557e-02 5.9060e-02
5.8298e-02 8.1652e-02 8.1499e-02
⋮
(1 ,0 ,.,.) =
-1.0088e-02 -1.2959e-02 9.7798e-03
5.5408e-02 4.3501e-02 5.6983e-02
5.3427e-02 3.5118e-02 3.6782e-02
(1 ,1 ,.,.) =
2.4442e-03 -3.0207e-02 -1.0377e-02
-4.5297e-02 -4.5318e-02 5.4623e-03
-4.4762e-02 -1.5508e-02 6.9745e-03
(1 ,2 ,.,.) =
3.9658e-02 3.6838e-02 5.8796e-03
2.3207e-02 3.9240e-03 -2.0887e-02
-1.4829e-02 5.3606e-03 1.7404e-03
...
(1 ,61,.,.) =
3.2160e-02 5.9042e-02 4.8433e-02
-2.6464e-02 -8.0667e-03 -1.0359e-02
-2.6699e-02 -9.5411e-03 -2.8902e-02
(1 ,62,.,.) =
-2.9235e-02 -3.9078e-02 -4.4955e-02
-2.0346e-02 -4.4891e-02 -3.7477e-02
1.9653e-02 -1.5562e-03 -5.8245e-03
(1 ,63,.,.) =
-5.0696e-02 -4.8902e-02 9.1631e-03
5.1668e-03 2.0509e-02 6.6874e-02
2.8934e-02 4.6717e-02 2.1371e-02
⋮
(2 ,0 ,.,.) =
2.1744e-02 -2.8354e-02 -3.2557e-02
3.0519e-02 1.8536e-02 1.5244e-02
1.3832e-03 1.7051e-02 3.2020e-02
(2 ,1 ,.,.) =
-3.6293e-02 1.0914e-02 4.5371e-02
1.3399e-02 6.4272e-02 8.8210e-02
4.6697e-02 9.9653e-02 8.7606e-02
(2 ,2 ,.,.) =
-2.4336e-02 -2.9627e-02 1.9537e-02
-3.3412e-02 -2.2290e-02 -2.8879e-02
1.4765e-02 1.7234e-02 -1.8185e-02
...
(2 ,61,.,.) =
-3.9859e-02 -7.1075e-02 -5.8546e-02
2.2902e-02 1.1184e-02 -2.3654e-02
8.1897e-02 1.1996e-01 9.3242e-02
(2 ,62,.,.) =
3.1984e-02 7.4931e-02 6.6020e-02
2.8490e-02 1.1931e-01 1.2100e-01
7.9259e-04 4.3812e-02 4.4648e-02
(2 ,63,.,.) =
3.2748e-02 4.1444e-02 -8.1932e-03
4.5541e-02 2.9426e-02 -8.5440e-03
1.1634e-04 1.8045e-03 1.4826e-02
...
⋮
(61,0 ,.,.) =
-4.4144e-02 -8.3106e-02 -5.3073e-02
3.2124e-02 1.0286e-02 2.4409e-02
6.1606e-03 -1.9455e-02 4.0534e-02
(61,1 ,.,.) =
5.6026e-04 9.6961e-03 2.5010e-03
7.1679e-03 -1.7535e-02 -2.3857e-02
-9.8745e-03 -1.8550e-02 1.7301e-03
(61,2 ,.,.) =
4.3882e-03 4.2049e-02 7.5950e-02
-6.5610e-02 -3.6130e-02 -1.9404e-02
-3.8091e-02 -2.6749e-02 -1.3865e-02
...
(61,61,.,.) =
-4.5593e-02 -4.6050e-02 -2.2809e-02
-9.7648e-03 2.4910e-03 2.4503e-02
2.0381e-02 5.2393e-02 6.9019e-02
(61,62,.,.) =
9.3306e-04 1.2483e-02 -1.1817e-02
-1.2627e-02 -1.8756e-02 -1.4144e-03
-5.2490e-03 -4.6126e-03 -1.3224e-02
(61,63,.,.) =
7.4689e-04 -1.0135e-02 -7.8264e-03
1.2491e-02 -2.5865e-02 4.0514e-02
5.8855e-03 4.5990e-02 1.0651e-01
⋮
(62,0 ,.,.) =
1.2262e-02 -1.5378e-02 1.3862e-03
4.1166e-02 -2.4944e-02 -2.6686e-02
-1.7423e-02 5.2690e-03 -2.1861e-02
(62,1 ,.,.) =
-3.1207e-02 -3.3025e-02 2.2114e-02
-2.4009e-02 1.2988e-02 2.2430e-02
1.0332e-02 4.3601e-03 4.7321e-03
(62,2 ,.,.) =
2.0182e-02 6.1569e-02 -2.8771e-02
5.8231e-02 4.6767e-02 -2.8417e-05
3.7545e-02 -4.5886e-02 1.5849e-02
...
(62,61,.,.) =
7.0431e-03 -3.6082e-03 7.1986e-03
2.4895e-02 6.1671e-03 -3.2427e-02
7.2338e-03 2.2406e-03 -5.3330e-02
(62,62,.,.) =
2.8072e-02 -1.0571e-02 -1.3854e-02
-1.0879e-02 6.1929e-03 -5.6713e-03
-2.6083e-02 8.1861e-03 -3.2873e-02
(62,63,.,.) =
-3.1032e-02 -6.0485e-02 -2.5583e-02
-4.6239e-02 -2.2805e-02 -7.7678e-03
-9.4698e-03 4.0247e-03 -4.8637e-03
⋮
(63,0 ,.,.) =
2.3128e-02 -5.6038e-02 -3.4572e-02
1.0638e-03 5.7929e-02 -7.6970e-03
-3.0103e-02 3.5573e-02 -1.8143e-02
(63,1 ,.,.) =
9.6840e-02 -1.1186e-01 -7.8766e-02
-1.0444e-01 -1.0851e-01 -1.9553e-01
-1.1986e-01 -7.1474e-02 3.6750e-02
(63,2 ,.,.) =
-2.2194e-02 6.0298e-03 5.6914e-02
-4.8342e-02 7.8893e-02 -5.1026e-02
-5.1294e-02 -5.7434e-02 -1.9178e-02
...
(63,61,.,.) =
-4.4896e-02 -8.1267e-02 5.1794e-02
-8.3985e-02 -5.7778e-02 6.7891e-02
2.3837e-02 3.8954e-02 4.1141e-02
(63,62,.,.) =
4.6446e-03 2.7367e-02 -2.3154e-02
2.0675e-02 2.3429e-02 6.4380e-04
-5.2222e-02 -1.4854e-02 -2.5150e-02
(63,63,.,.) =
2.1291e-02 1.2736e-02 8.4553e-03
-8.2932e-02 7.2067e-02 1.3107e-01
8.5491e-03 1.3677e-01 3.9867e-02
[torch.FloatTensor of size 64x64x3x3]
), ('layer1.1.bn2.weight',
0.2560
0.5690
0.4042
0.5130
0.2178
0.4940
0.3315
0.5510
0.4354
0.5291
0.2081
0.4735
0.5945
0.5645
0.2761
0.2571
0.4853
0.6240
0.4370
0.2308
0.4970
0.3157
0.5706
0.2162
0.1932
0.1448
0.2218
0.2389
0.5871
0.3501
0.4109
0.3199
0.5808
0.3281
0.2723
0.1971
0.6139
0.4075
0.6304
0.3874
0.7605
0.2111
0.3071
0.4603
0.3099
0.1914
0.4431
0.2537
0.5745
0.6459
0.3914
0.3090
0.6782
0.1937
0.5814
0.2570
0.3514
0.2124
0.5794
0.3415
0.2051
0.0715
0.4090
0.4416
[torch.FloatTensor of size 64]
), ('layer1.1.bn2.bias',
-0.1778
-0.1287
0.0349
-0.1452
0.1864
-0.1413
-0.4201
-0.1334
0.2183
-0.1912
0.0311
-0.0235
-0.1724
-0.0274
-0.0295
-0.1031
0.0047
0.0828
-0.1521
0.0183
-0.2418
-0.0831
-0.0491
-0.0688
-0.2560
0.1381
-0.0165
0.2092
-0.0028
-0.0265
-0.0225
0.0286
-0.1065
-0.3698
0.2862
-0.1036
0.3080
-0.0894
0.2772
0.1136
-0.3157
0.0423
0.0567
0.2369
-0.0727
0.0465
-0.0536
0.1309
0.0282
-0.1371
0.1464
-0.0717
-0.3237
-0.1583
-0.0424
-0.1278
-0.1703
0.0413
0.0891
0.0770
-0.0730
0.0683
-0.0391
0.0476
[torch.FloatTensor of size 64]
), ('layer1.1.bn2.running_mean',
-0.0555
-0.2037
0.7682
-0.0659
0.4746
-0.0462
-0.0896
0.0405
-0.2446
-0.3079
0.2418
-0.0135
-0.0139
-0.5716
0.1631
-0.1234
-0.0607
-0.0682
0.0326
0.0245
-0.1008
0.0646
0.0028
-0.0101
-0.0145
0.0377
-0.0842
0.0183
-0.5056
-0.0529
-0.0573
-0.1212
-0.3578
-0.2472
-0.3403
0.0570
-0.2512
-0.2658
-0.1210
-0.0369
-0.0996
0.2838
0.1478
-0.1105
-0.4597
-0.1867
-0.2858
0.1237
-0.1291
-0.2389
0.0203
0.1081
-0.2310
-0.0848
-0.0316
0.2546
0.0597
-0.1729
-0.0190
0.1898
0.0823
0.0380
-0.0429
0.1392
[torch.FloatTensor of size 64]
), ('layer1.1.bn2.running_var',
0.0485
0.1034
0.0663
0.0458
0.1147
0.0534
0.0654
0.0467
0.0442
0.0820
0.0332
0.0400
0.0379
0.0849
0.0409
0.0282
0.0821
0.0699
0.0327
0.0497
0.0506
0.1060
0.0921
0.0300
0.0170
0.0383
0.0358
0.0383
0.0745
0.0579
0.0390
0.0504
0.0494
0.0617
0.0458
0.0347
0.0525
0.0575
0.0475
0.0354
0.0658
0.0336
0.0437
0.0734
0.0574
0.0596
0.0452
0.0403
0.0789
0.0551
0.0328
0.0775
0.0722
0.0390
0.0501
0.0394
0.0454
0.0450
0.0899
0.0297
0.0527
0.0184
0.0526
0.0340
[torch.FloatTensor of size 64]
), ('layer2.0.conv1.weight',
( 0 , 0 ,.,.) =
-7.1555e-02 -1.1031e-01 -1.3711e-01
7.0593e-02 -1.4782e-02 -1.0053e-01
1.1938e-01 8.7330e-02 -8.2206e-03
( 0 , 1 ,.,.) =
-2.3999e-02 -6.3682e-03 2.4303e-03
6.1831e-03 1.8781e-02 2.5324e-02
2.3656e-03 -4.0037e-03 -1.1949e-02
( 0 , 2 ,.,.) =
6.0344e-03 6.3784e-03 -1.2247e-02
7.8854e-03 -1.3464e-02 -4.2702e-02
1.7380e-02 -1.3862e-02 -4.7145e-02
...
( 0 ,61 ,.,.) =
3.4324e-02 3.2257e-02 2.5819e-02
8.4676e-03 -4.5413e-04 -1.0832e-02
-6.7166e-03 -1.5052e-02 -2.6939e-02
( 0 ,62 ,.,.) =
-1.2089e-02 -2.3588e-02 -2.2689e-02
1.0135e-02 1.8285e-02 -1.5695e-02
2.1352e-02 5.8568e-02 4.2873e-02
( 0 ,63 ,.,.) =
1.4421e-02 -2.8298e-02 -7.0770e-03
3.0260e-02 -6.6294e-03 -1.6901e-02
3.9085e-02 1.4222e-02 2.2294e-02
⋮
( 1 , 0 ,.,.) =
-7.7911e-02 -7.3929e-02 -3.6671e-02
-3.4903e-02 -6.2355e-02 -3.7793e-02
-2.8379e-02 -5.4291e-02 -4.9411e-02
( 1 , 1 ,.,.) =
-1.2970e-02 -2.1825e-02 -2.8767e-04
7.6444e-03 1.7653e-02 1.6660e-02
3.8337e-02 2.3006e-02 -1.6620e-03
( 1 , 2 ,.,.) =
-8.7592e-02 -8.4735e-02 -5.5818e-02
-7.7731e-02 -8.0311e-02 -3.2554e-02
-5.6313e-02 -4.2047e-02 1.5247e-02
...
( 1 ,61 ,.,.) =
-3.2377e-02 -4.0018e-02 -2.9523e-02
-1.5294e-02 -1.4165e-02 2.7086e-03
1.1652e-02 2.3886e-02 2.4413e-02
( 1 ,62 ,.,.) =
2.0891e-03 -3.0475e-02 -3.3818e-02
6.7829e-03 3.8681e-04 -1.4540e-02
-3.1306e-03 6.7689e-03 8.4524e-03
( 1 ,63 ,.,.) =
3.0586e-02 4.6281e-02 3.8359e-04
5.3079e-02 6.7488e-02 3.0547e-02
2.3374e-02 4.3993e-02 -3.8713e-03
⋮
( 2 , 0 ,.,.) =
1.3878e-02 3.2724e-02 4.6584e-02
-8.0647e-03 1.6209e-03 1.5153e-02
-7.0342e-02 -5.3299e-02 -4.5920e-02
( 2 , 1 ,.,.) =
4.6035e-02 3.5400e-02 3.4941e-02
5.8351e-02 5.4640e-02 2.7162e-02
2.6799e-02 4.5056e-02 6.6886e-03
( 2 , 2 ,.,.) =
-3.3766e-02 -3.8605e-02 -2.4172e-02
-1.8285e-03 1.0888e-02 1.1425e-02
2.2282e-02 1.4024e-02 3.6332e-03
...
( 2 ,61 ,.,.) =
-1.6330e-02 -6.9552e-02 -8.9737e-02
3.9766e-02 1.5501e-02 -2.2695e-02
1.0290e-01 1.2294e-01 6.3867e-02
( 2 ,62 ,.,.) =
-4.2318e-03 4.9511e-02 -7.6289e-03
-2.7720e-02 7.0398e-03 -9.4052e-03
-6.7008e-02 -6.0542e-02 -2.5967e-02
( 2 ,63 ,.,.) =
-5.8560e-03 -1.7573e-02 -3.8016e-02
2.8579e-03 -4.1603e-03 1.0113e-02
2.6243e-02 3.5200e-02 3.1143e-02
...
⋮
(125, 0 ,.,.) =
-4.4193e-02 -6.5322e-02 -1.7594e-02
-9.3970e-02 -5.8291e-02 1.2093e-02
-2.2998e-02 3.2463e-02 7.1731e-02
(125, 1 ,.,.) =
-4.7220e-03 -3.0125e-03 -1.8075e-02
1.2667e-02 -8.0509e-03 -1.4605e-02
7.8220e-03 -1.0720e-02 -2.6515e-02
(125, 2 ,.,.) =
-2.5299e-02 -4.9383e-02 -1.2720e-02
-5.2206e-02 -4.7233e-02 -4.2470e-03
-4.8697e-02 -2.5320e-02 8.6178e-03
...
(125,61 ,.,.) =
-3.7617e-03 7.8398e-03 -5.9525e-03
4.0277e-03 7.3575e-03 -1.1667e-02
-3.9997e-02 -3.8038e-02 -5.0469e-02
(125,62 ,.,.) =
-3.8949e-03 -6.8965e-03 3.4102e-02
-6.9814e-03 -4.9762e-02 5.8711e-02
1.8361e-02 2.5874e-02 8.0028e-02
(125,63 ,.,.) =
-3.3014e-02 -2.1510e-02 -2.1509e-03
-4.3894e-02 -3.2009e-02 -1.6265e-02
-1.1037e-02 2.8872e-04 3.0937e-02
⋮
(126, 0 ,.,.) =
-4.9907e-02 -5.0222e-02 -5.0985e-02
2.2644e-02 -1.4098e-02 -2.4426e-02
1.9960e-02 9.6426e-02 1.0580e-01
(126, 1 ,.,.) =
-3.6873e-02 2.1413e-03 8.3469e-03
-4.0796e-02 -3.3767e-02 -3.4955e-02
3.9466e-02 7.0508e-02 8.6065e-02
(126, 2 ,.,.) =
1.4842e-02 6.6914e-03 1.4324e-02
-3.2621e-02 -4.4027e-02 -2.2269e-02
7.1982e-03 -1.9187e-02 -4.9348e-03
...
(126,61 ,.,.) =
-4.9938e-03 1.6018e-02 1.1242e-02
-4.7668e-03 2.1921e-02 2.2660e-02
-2.6753e-02 2.6917e-04 -5.6827e-03
(126,62 ,.,.) =
-8.7725e-03 1.0761e-02 7.3603e-03
-1.8010e-05 -1.7926e-02 4.8229e-03
4.2431e-02 -1.5764e-02 2.3554e-02
(126,63 ,.,.) =
-1.3830e-02 -3.0793e-03 -4.0854e-03
3.3363e-02 4.2952e-02 3.5867e-02
-3.9653e-02 -3.0855e-02 -4.3189e-02
⋮
(127, 0 ,.,.) =
-3.8617e-02 -3.1549e-03 2.5739e-03
-1.1592e-02 9.8761e-03 7.5235e-03
-1.9339e-02 -9.8779e-03 2.1755e-03
(127, 1 ,.,.) =
1.6889e-04 1.8302e-03 -8.9537e-03
5.8343e-03 1.7360e-02 -1.9029e-02
5.8642e-03 -7.4307e-04 1.4667e-03
(127, 2 ,.,.) =
-1.6506e-02 -2.8401e-02 1.3986e-02
-2.2922e-02 -4.3484e-02 1.0471e-02
-2.5801e-03 -4.5258e-02 7.9791e-03
...
(127,61 ,.,.) =
-1.5260e-03 -7.6469e-03 1.3597e-02
5.5301e-04 -2.9176e-03 2.2147e-02
3.2763e-03 -1.0775e-05 1.3163e-02
(127,62 ,.,.) =
5.1756e-03 1.8495e-02 -8.0268e-03
-3.5030e-02 2.6403e-02 -7.1220e-03
-5.2325e-02 -1.1185e-02 1.9146e-02
(127,63 ,.,.) =
-6.8805e-02 5.1618e-02 1.9787e-02
2.5533e-02 -6.1926e-02 4.9924e-02
1.0532e-01 -4.4136e-02 4.9907e-02
[torch.FloatTensor of size 128x64x3x3]
), ('layer2.0.bn1.weight',
0.3248
0.3613
0.2960
0.2913
0.3407
0.3435
0.3049
0.3308
0.3447
0.3860
0.3196
0.2622
0.2994
0.2189
0.2397
0.3744
0.3555
0.1948
0.3349
0.2159
0.3349
0.3454
0.3094
0.3769
0.3546
0.3267
0.3178
0.3272
0.3832
0.2585
0.2973
0.3481
0.2827
0.2995
0.3451
0.3471
0.3440
0.3344
0.3211
0.3180
0.2940
0.3353
0.3253
0.3733
0.3198
0.2987
0.1620
0.3262
0.3271
0.3410
0.3693
0.3320
0.3357
0.2951
0.3115
0.3185
0.3139
0.2633
0.3089
0.3601
0.2734
0.3433
0.3335
0.3288
0.2706
0.2879
0.3318
0.3310
0.3170
0.2977
0.3300
0.3216
0.3205
0.3231
0.3481
0.3130
0.2826
0.2856
0.3279
0.3666
0.3288
0.3575
0.3377
0.2904
0.3273
0.3214
0.3332
0.3452
0.1842
0.3916
0.3337
0.2325
0.3285
0.3358
0.2885
0.3149
0.3288
0.2236
0.3159
0.2993
0.3403
0.3220
0.3171
0.2950
0.2847
0.3224
0.3119
0.2613
0.3374
0.3333
0.3330
0.2959
0.4087
0.2192
0.2982
0.4006
0.3081
0.3171
0.2862
0.2952
0.3070
0.3583
0.3232
0.3345
0.3453
0.3043
0.3327
0.3337
[torch.FloatTensor of size 128]
), ('layer2.0.bn1.bias',
-0.0589
-0.1686
-0.0206
0.0027
-0.0955
-0.1048
0.0349
-0.0885
-0.2053
-0.1764
-0.1224
-0.0364
-0.0785
0.2088
-0.0403
-0.1820
-0.1076
0.2989
-0.0570
0.2064
-0.0921
-0.1376
-0.1304
-0.1193
-0.1006
-0.0380
-0.1108
-0.0477
-0.1087
0.1581
-0.1123
-0.1584
0.0976
-0.0430
-0.1349
-0.1189
-0.0986
-0.0479
-0.0837
-0.0720
-0.0836
-0.2442
-0.3376
-0.2124
-0.0693
-0.0651
0.4979
-0.0811
-0.1021
-0.0788
-0.1802
-0.1011
-0.1090
-0.0617
-0.0856
-0.0495
-0.0370
0.0023
-0.0508
-0.2430
0.0009
-0.1525
-0.0963
-0.0516
-0.0473
0.0884
-0.1028
-0.0907
-0.1086
-0.0379
-0.1030
-0.1609
-0.0903
-0.0898
-0.1282
-0.0830
-0.0186
-0.0232
-0.0045
-0.2131
-0.1431
-0.1391
-0.1303
-0.0568
-0.1862
-0.1209
-0.0340
-0.1181
0.2298
-0.2085
-0.1335
0.1418
-0.0891
-0.1273
0.0107
-0.1029
-0.1025
0.1562
-0.0937
-0.0657
-0.1245
-0.0451
-0.0707
-0.0447
0.0715
-0.0484
-0.0312
-0.0437
-0.0927
-0.1465
-0.1151
-0.0183
-0.1927
0.2491
0.0300
-0.1310
-0.0468
-0.0851
-0.0421
-0.0413
-0.0457
-0.1433
-0.0981
-0.1046
-0.1315
-0.1249
-0.0982
-0.0961
[torch.FloatTensor of size 128]
), ('layer2.0.bn1.running_mean',
0.1502
0.3009
-0.1475
-0.1210
-0.5701
-0.7525
0.0232
-0.1191
-0.5203
-0.0344
0.1527
-0.8009
-0.2133
-0.1956
-0.4503
-0.2632
0.0839
-1.3614
0.3520
0.0435
-0.5124
-0.4489
0.3674
-0.7865
-0.0061
-0.5502
-0.2629
-0.0697
-0.3892
0.8596
-0.0261
0.0194
-1.4822
0.2077
0.0741
-0.5370
0.6348
0.0066
-0.6156
-0.6373
-0.2649
0.3021
-0.6140
-0.8625
-1.1688
-0.2691
-0.7569
-0.7104
-0.5601
-0.3803
-0.6424
-0.5653
-0.3943
-0.8532
-0.8817
-0.5444
-0.2364
-0.2572
-0.0131
-1.1256
0.2372
-0.2265
-0.1682
-0.7450
-0.8640
0.2118
0.1918
0.5058
0.0755
-0.6975
-0.7518
0.5799
-0.2933
-0.0071
-0.6256
-0.2616
-0.6733
-1.1375
0.1193
-0.4987
-0.6461
-0.0576
0.0361
0.0026
-1.1884
0.2901
-0.7978
-0.2888
0.7106
-0.6718
-0.3914
0.3720
-0.4927
-0.5238
-0.0162
-0.5074
-0.3267
-1.2319
-0.1927
-0.1273
0.3230
-0.0156
-0.1317
-0.6099
-0.0179
-0.3003
-0.1247
0.1452
-0.4937
-0.4852
-0.3357
-0.3261
-0.3776
-0.3691
-0.6458
-0.3323
-0.0424
-0.2551
-0.6557
-0.2917
0.5345
-0.4286
0.1585
-0.3547
-0.1262
-1.0521
-0.2490
-0.2917
[torch.FloatTensor of size 128]
), ('layer2.0.bn1.running_var',
0.5933
0.5225
0.9587
1.2984
0.6954
0.5495
0.7123
0.7396
0.4001
0.5929
0.4880
0.6532
0.5901
0.5786
0.5031
0.6984
0.6699
0.3613
0.6258
0.5226
0.6306
0.6908
0.4390
0.7955
0.8504
0.8891
0.4963
0.7665
0.9027
0.6111
0.4632
0.4609
0.8053
0.4621
0.5438
0.6401
0.6203
0.6133
0.6070
0.6558
0.5315
0.2684
0.1446
0.5651
0.7784
0.6977
0.4379
0.6246
0.7749
0.7301
0.5099
0.6298
0.5608
0.7794
0.6851
0.6257
1.0322
0.7427
0.9320
0.3659
0.7492
0.5316
0.6421
1.2247
0.6555
0.7807
0.7130
0.5728
0.4848
0.6181
0.6428
0.5033
0.6044
0.6865
0.6463
0.6181
0.5878
0.6709
0.8211
0.5813
0.6612
0.6009
0.6994
0.5487
0.3528
0.4863
0.7886
0.6114
0.3858
0.6698
0.4507
0.7751
0.5016
0.5925
1.1193
0.7031
0.5847
0.4395
0.6343
0.5480
0.6623
0.7094
0.5908
0.8719
1.1220
0.7039
0.6290
0.4964
0.7137
0.6734
0.4282
0.7129
0.5942
0.7962
0.6300
0.9883
0.6343
0.7726
0.6982
0.6926
0.6692
0.7207
0.4516
0.6805
0.5262
0.4744
0.7139
0.6144
[torch.FloatTensor of size 128]
), ('layer2.0.conv2.weight',
( 0 , 0 ,.,.) =
-7.4379e-03 -9.8091e-03 2.7976e-03
-1.0780e-02 2.5794e-02 4.5517e-02
-2.7241e-02 5.3206e-03 1.3177e-02
( 0 , 1 ,.,.) =
3.5440e-02 2.5101e-02 7.8204e-03
4.0312e-03 1.9894e-02 2.7449e-02
3.5329e-02 3.5456e-02 1.3315e-02
( 0 , 2 ,.,.) =
1.9270e-02 -2.1333e-02 -3.6199e-02
-1.9590e-02 -1.8873e-02 -5.9538e-02
-2.1838e-02 -7.6875e-03 3.9487e-03
...
( 0 ,125,.,.) =
-6.8038e-03 1.0841e-02 -3.7045e-03
1.3479e-02 1.1362e-02 -1.3431e-03
8.1422e-03 1.9292e-04 5.5109e-04
( 0 ,126,.,.) =
7.6939e-03 7.7306e-03 4.3960e-03
-1.0202e-02 -1.1698e-02 -9.6343e-03
-3.9049e-03 1.8147e-02 1.3297e-02
( 0 ,127,.,.) =
1.2434e-02 -2.6889e-02 -1.1974e-02
2.6846e-02 2.6409e-02 -2.1473e-02
1.2892e-02 2.7632e-03 -5.4267e-03
⋮
( 1 , 0 ,.,.) =
1.2840e-02 1.8529e-02 -2.6782e-03
-1.6777e-02 -1.2281e-02 3.5471e-02
-8.6486e-04 2.4498e-02 1.8152e-02
( 1 , 1 ,.,.) =
-6.6870e-03 -2.0710e-02 -1.4421e-02
-7.3135e-03 4.2568e-02 7.4339e-03
2.7640e-02 1.5997e-02 1.5939e-02
( 1 , 2 ,.,.) =
-2.2903e-02 -2.0577e-02 2.3593e-02
-2.7524e-02 -5.6073e-02 -6.9899e-02
2.0502e-02 5.1301e-02 2.1989e-02
...
( 1 ,125,.,.) =
-2.7188e-02 -3.8969e-02 -3.9503e-02
-6.2117e-02 -7.4923e-02 -9.5650e-02
-6.0467e-02 -7.7697e-02 -7.4620e-02
( 1 ,126,.,.) =
2.8663e-02 2.9341e-02 2.8688e-02
7.9438e-03 4.7108e-02 1.4586e-02
-1.8200e-03 2.2035e-02 7.3670e-03
( 1 ,127,.,.) =
1.5625e-03 -1.6815e-02 -4.6104e-03
-8.1347e-03 -2.5480e-02 -5.2408e-02
-9.2823e-03 -9.6452e-04 -3.7804e-02
⋮
( 2 , 0 ,.,.) =
4.7439e-03 6.0541e-03 -7.1074e-03
2.3583e-02 -9.3094e-02 -7.9317e-02
-7.8970e-03 -5.0526e-02 -1.0469e-02
( 2 , 1 ,.,.) =
1.4810e-02 1.6199e-02 -5.1457e-02
8.5937e-03 8.1354e-03 -4.2865e-02
9.0780e-02 6.5382e-02 4.3530e-02
( 2 , 2 ,.,.) =
-1.3827e-02 -6.3971e-03 8.4965e-03
1.3832e-02 -1.2413e-02 5.3880e-03
2.0189e-02 -3.5255e-03 7.9905e-03
...
( 2 ,125,.,.) =
-9.2351e-04 1.8478e-02 -3.0603e-02
-1.7034e-02 5.6756e-03 -4.9061e-02
-3.2771e-02 -3.7422e-02 -4.5931e-02
( 2 ,126,.,.) =
-4.6355e-03 6.9231e-03 -1.0628e-03
-7.9292e-03 -4.9909e-02 4.2104e-02
-7.5158e-02 -4.7826e-03 -5.8031e-03
( 2 ,127,.,.) =
1.1503e-02 -1.4634e-02 3.7884e-02
1.4056e-02 4.8553e-02 2.3157e-02
2.1494e-02 -1.0090e-02 3.3782e-02
...
⋮
(125, 0 ,.,.) =
2.6448e-02 4.0213e-03 7.5348e-03
6.3626e-02 -3.1986e-02 -1.8433e-03
2.6220e-02 7.5575e-03 4.9462e-02
(125, 1 ,.,.) =
-2.8731e-02 -2.2669e-02 -5.1264e-02
-2.6000e-02 -4.8740e-02 -1.4003e-02
-1.7263e-02 -4.1574e-02 -1.1665e-02
(125, 2 ,.,.) =
-3.4972e-02 3.5634e-02 3.4700e-02
1.8265e-02 4.3594e-02 -2.6302e-02
1.7826e-02 3.5585e-02 1.1340e-02
...
(125,125,.,.) =
9.7429e-03 -1.7253e-02 -1.6983e-04
-1.9886e-02 8.1994e-02 1.2903e-02
-2.3786e-02 -4.7812e-03 4.8584e-02
(125,126,.,.) =
-2.4373e-02 -2.5836e-02 -3.5317e-02
-2.9582e-02 -9.6624e-02 -5.3546e-02
-1.5009e-02 5.9241e-03 -1.9719e-02
(125,127,.,.) =
6.8366e-03 -3.6779e-02 -2.5541e-02
-1.1634e-02 -2.3650e-02 -7.8005e-03
8.6452e-03 7.8958e-03 -1.8926e-02
⋮
(126, 0 ,.,.) =
3.2894e-02 2.9690e-02 1.1071e-02
3.8989e-02 -8.9897e-03 2.2632e-02
7.8374e-03 -2.7959e-02 -2.3005e-02
(126, 1 ,.,.) =
1.3667e-02 2.2886e-02 -1.8989e-02
2.7104e-03 1.1235e-02 7.4223e-03
2.1089e-02 4.3557e-02 1.0752e-02
(126, 2 ,.,.) =
-2.3662e-02 2.2110e-02 4.3471e-04
-3.0925e-02 6.0868e-02 1.6691e-02
-8.8467e-02 -8.3442e-02 -3.4247e-02
...
(126,125,.,.) =
-7.3418e-03 -1.9690e-02 1.7969e-03
7.2727e-03 -3.4322e-02 -2.4270e-02
-1.1512e-02 -6.0470e-02 -5.5070e-02
(126,126,.,.) =
3.0219e-03 2.6285e-03 1.7110e-02
-1.3418e-02 -8.5859e-03 9.0284e-03
1.9504e-02 9.4355e-03 4.5180e-03
(126,127,.,.) =
1.3674e-03 7.6213e-04 1.1925e-02
-2.3910e-03 -1.0733e-02 1.2625e-02
-5.0613e-03 -5.7724e-03 -1.4643e-02
⋮
(127, 0 ,.,.) =
-7.4213e-03 1.1754e-02 -4.2728e-02
1.6309e-03 -4.5691e-02 -1.3976e-01
-6.5419e-03 -2.0547e-03 -4.8392e-02
(127, 1 ,.,.) =
7.5053e-03 5.2659e-02 3.8849e-02
-1.2484e-02 8.4685e-02 6.2233e-03
1.3136e-03 -1.9656e-02 -8.2167e-02
(127, 2 ,.,.) =
-2.4916e-02 1.6551e-02 1.6914e-02
8.6507e-03 2.1444e-02 1.1694e-02
-9.0502e-04 3.0596e-02 1.3600e-02
...
(127,125,.,.) =
-7.8114e-03 2.2029e-02 -1.7545e-02
-7.5889e-03 -2.1149e-02 -3.6984e-03
1.2622e-02 -2.0709e-02 -5.3862e-03
(127,126,.,.) =
3.0152e-02 -8.2268e-03 -6.4910e-02
-2.3752e-02 -9.5375e-02 -5.3019e-02
-1.6835e-02 -1.1071e-02 9.9055e-04
(127,127,.,.) =
-2.4533e-02 -8.4685e-02 2.5065e-02
1.0639e-02 3.8693e-02 1.4004e-01
1.5497e-02 -9.5081e-03 4.0948e-03
[torch.FloatTensor of size 128x128x3x3]
), ('layer2.0.bn2.weight',
0.1454
0.3270
0.3113
0.2538
0.4086
0.3937
0.4400
0.3108
0.3406
0.2168
0.2170
0.3857
0.1971
0.2692
0.1663
0.2454
0.3232
0.3686
0.3893
0.3264
0.3875
0.4707
0.1958
0.4717
0.1673
0.3938
0.3044
0.1929
0.2175
0.2119
0.4230
0.3683
0.2455
0.2229
0.3370
0.3229
0.2688
0.3557
0.2581
0.4031
0.4492
0.3642
0.2599
0.1881
0.1359
0.2958
0.1913
0.3065
0.3981
0.4102
0.1874
0.4516
0.3340
0.1628
0.3599
0.1624
0.2886
0.1358
0.4491
0.2694
0.4823
0.3393
0.4764
0.3155
0.6005
0.4654
0.5264
0.2991
0.2992
0.4621
0.2614
0.4247
0.4662
0.4249
0.3345
0.2655
0.4048
0.3605
0.1782
0.3833
0.2823
0.3843
0.3307
0.2151
0.3317
0.1458
0.2771
0.4917
0.3199
0.4222
0.1559
0.4884
0.3267
0.3440
0.1608
0.4855
0.2677
0.1616
0.3221
0.4243
0.3661
0.1893
0.3400
0.3648
0.1779
0.3544
0.2852
0.2437
0.4472
0.3011
0.3997
0.6173
0.2794
0.4867
0.1502
0.6021
0.3604
0.4696
0.3711
0.2388
0.5347
0.1509
0.3213
0.4394
0.3229
0.4329
0.1489
0.3702
[torch.FloatTensor of size 128]
), ('layer2.0.bn2.bias',
0.0246
0.0593
0.1347
-0.1089
-0.0470
-0.1359
-0.0550
0.0509
-0.0613
0.0916
0.0031
-0.0274
-0.0539
0.0177
0.0432
0.0074
0.0548
-0.0321
-0.0224
0.0142
-0.2150
-0.1160
0.0486
-0.1141
0.1066
0.0355
0.0140
0.0177
0.0781
0.1331
0.0139
0.0447
0.1063
0.0528
-0.0539
-0.1160
0.1055
-0.1591
0.0100
0.1197
0.0170
0.0929
-0.0675
0.0987
0.1034
0.0501
0.0297
0.0281
-0.0075
-0.0577
-0.0144
-0.1640
0.1255
0.0817
0.0635
0.0936
0.0213
0.0486
-0.1174
0.0237
-0.2177
0.0099
-0.1883
0.0467
-0.0829
0.0585
-0.0306
0.0509
0.0541
-0.1671
0.0115
-0.0302
-0.1393
0.0115
0.0428
0.1189
-0.1289
0.0479
0.0474
-0.0625
0.0009
-0.0144
0.0909
0.1342
-0.0338
0.0560
0.0848
-0.0467
0.0228
-0.0097
0.1360
-0.2625
0.0088
-0.0553
0.0383
-0.0720
0.0907
0.1612
-0.1076
0.1011
-0.0519
0.0838
-0.0704
-0.0806
-0.0243
0.0533
0.1277
0.1403
-0.0593
-0.0639
-0.0766
-0.1163
0.0661
-0.1644
0.0422
-0.2786
-0.1006
-0.0696
-0.0761
0.0371
-0.0247
0.0916
-0.0200
-0.0176
0.0298
-0.0373
0.0466
-0.1371
[torch.FloatTensor of size 128]
), ('layer2.0.bn2.running_mean',
-0.4532
-0.1524
-0.3771
-0.0713
-0.2878
-0.1534
-0.5443
-0.1878
-0.2956
-0.0365
-0.0336
-0.1475
0.0068
-0.1377
-0.1417
-0.3703
-0.4644
-0.1248
0.4767
0.0385
-0.3257
-0.1539
-0.3181
-0.1037
0.0639
-0.2066
-0.1608
0.0557
0.1252
-0.3812
-0.2301
-0.1256
-0.2842
-0.0949
-0.3629
-0.1013
0.3173
-0.1264
-0.1660
-0.1444
-0.9475
-1.0992
-0.0125
-0.0690
0.1497
-0.3284
0.0886
-0.3466
-0.2630
-0.1583
-0.3092
-0.0368
-0.0169
-0.3435
-0.3742
-0.2488
-0.1078
-0.3069
-0.1010
0.1655
-0.4201
-0.2702
-0.1342
-0.0495
-0.4643
-0.2271
-0.4530
-0.0365
-0.4531
-0.0485
0.0403
-0.0798
-0.0885
-0.1399
-0.5255
0.0254
-0.1210
-0.2685
-0.1447
-0.1114
-0.5782
-0.3445
-0.0098
-0.8503
-0.0380
-0.2450
-0.0705
-0.1167
-0.1946
-0.3769
-0.5091
0.2355
-0.1791
-0.2465
-0.2035
-0.3560
0.0100
-0.2061
-0.0407
-0.6231
-0.0431
-0.2874
-0.3627
-0.1486
-0.0271
-0.3714
0.1313
-0.1827
-0.2294
-0.0660
-0.0431
-0.9597
0.0849
-0.0855
-0.3286
-0.9559
-0.1640
-0.0745
0.1040
-0.3808
-0.4664
0.0823
-0.2148
-0.3367
-0.0775
-0.1677
-0.0668
-0.1016
[torch.FloatTensor of size 128]
), ('layer2.0.bn2.running_var',
0.0481
0.0571
0.0619
0.0319
0.0896
0.0538
0.0606
0.1026
0.0445
0.1045
0.0477
0.0751
0.0312
0.0500
0.0453
0.0511
0.0846
0.0792
0.1995
0.0590
0.0555
0.0877
0.0545
0.0825
0.0511
0.1046
0.0602
0.0467
0.0575
0.0667
0.0973
0.0930
0.0601
0.0702
0.0693
0.0347
0.1059
0.0404
0.0449
0.2404
0.1996
0.1850
0.0337
0.0491
0.0327
0.0976
0.0398
0.0999
0.0879
0.0753
0.0368
0.0639
0.1159
0.0487
0.1282
0.0614
0.0541
0.0333
0.0908
0.0726
0.0490
0.0751
0.0646
0.0694
0.1447
0.1111
0.1868
0.0648
0.0639
0.0538
0.0637
0.0589
0.0643
0.1066
0.1363
0.0845
0.0670
0.1007
0.0361
0.0741
0.0437
0.0776
0.0721
0.0685
0.0612
0.0608
0.0688
0.1067
0.0610
0.0797
0.0385
0.0575
0.0512
0.0672
0.0229
0.0898
0.0729
0.0448
0.0379
0.2440
0.0769
0.0878
0.0522
0.0541
0.0225
0.0741
0.1303
0.0576
0.0836
0.0499
0.0524
0.1636
0.0871
0.0577
0.0498
0.1113
0.0679
0.0683
0.0465
0.0505
0.1792
0.0842
0.0414
0.0971
0.0470
0.0575
0.0490
0.0455
[torch.FloatTensor of size 128]
), ('layer2.0.downsample.0.weight',
( 0 , 0 ,.,.) =
1.5916e-02
( 0 , 1 ,.,.) =
-3.1090e-01
( 0 , 2 ,.,.) =
1.2615e-02
...
( 0 ,61 ,.,.) =
-1.6723e-01
( 0 ,62 ,.,.) =
1.2692e-02
( 0 ,63 ,.,.) =
1.3152e-02
⋮
( 1 , 0 ,.,.) =
3.5526e-03
( 1 , 1 ,.,.) =
-1.0868e-03
( 1 , 2 ,.,.) =
-8.2883e-03
...
( 1 ,61 ,.,.) =
-2.3444e-02
( 1 ,62 ,.,.) =
-7.5592e-02
( 1 ,63 ,.,.) =
-1.2622e-02
⋮
( 2 , 0 ,.,.) =
-4.1898e-02
( 2 , 1 ,.,.) =
7.9478e-03
( 2 , 2 ,.,.) =
-1.6623e-01
...
( 2 ,61 ,.,.) =
3.1887e-02
( 2 ,62 ,.,.) =
-1.8766e-02
( 2 ,63 ,.,.) =
6.4507e-02
...
⋮
(125, 0 ,.,.) =
-2.8725e-02
(125, 1 ,.,.) =
4.7026e-02
(125, 2 ,.,.) =
-5.2251e-02
...
(125,61 ,.,.) =
-4.7365e-02
(125,62 ,.,.) =
5.8639e-02
(125,63 ,.,.) =
5.8808e-02
⋮
(126, 0 ,.,.) =
-7.7884e-03
(126, 1 ,.,.) =
-2.0288e-02
(126, 2 ,.,.) =
5.6392e-02
...
(126,61 ,.,.) =
7.8023e-01
(126,62 ,.,.) =
-2.2917e-03
(126,63 ,.,.) =
-2.5941e-02
⋮
(127, 0 ,.,.) =
-2.8316e-02
(127, 1 ,.,.) =
-1.3194e-02
(127, 2 ,.,.) =
-5.1356e-02
...
(127,61 ,.,.) =
2.3552e-02
(127,62 ,.,.) =
-6.7667e-02
(127,63 ,.,.) =
2.6754e-02
[torch.FloatTensor of size 128x64x1x1]
), ('layer2.0.downsample.1.weight',
0.3334
0.0581
0.0715
0.3442
0.1756
0.1509
0.1568
0.3100
0.1927
0.1516
0.3044
0.2238
0.3706
0.1739
0.3051
0.2610
0.1575
0.2015
0.2933
0.1010
0.5871
0.0676
0.2499
0.0929
0.2443
0.0495
0.2449
0.2750
0.3071
0.3025
0.1818
0.0688
0.2223
0.3766
0.4661
0.3284
0.1035
0.3400
0.2325
0.1514
0.1753
0.2269
0.2606
0.1831
0.2894
0.2590
0.2208
0.1399
0.0643
0.2833
0.3451
0.2017
0.0696
0.2722
0.1127
0.2917
0.2358
0.2703
0.0911
0.2591
0.1302
0.2261
0.1967
0.0539
0.0697
0.0524
0.1050
0.0861
0.1173
0.0957
0.1862
0.1642
0.1336
0.1065
0.1312
0.0888
0.0793
0.0475
0.3049
0.2325
0.2908
0.1292
0.0778
0.2263
0.2379
0.3405
0.0914
0.1936
0.1223
0.1400
0.2953
0.2360
0.1681
0.1338
0.2666
0.1495
0.0761
0.1674
0.1784
0.1720
0.2318
0.3753
0.2103
0.1922
0.4002
0.1718
0.0593
0.0742
0.0686
0.1931
0.1386
0.1111
0.3055
0.1205
0.3443
0.1633
0.3673
0.1534
0.0742
0.2088
0.0394
0.2594
0.1385
-0.0051
0.1905
0.1275
0.3071
0.1682
[torch.FloatTensor of size 128]
), ('layer2.0.downsample.1.bias',
0.0246
0.0593
0.1347
-0.1089
-0.0470
-0.1359
-0.0550
0.0509
-0.0613
0.0916
0.0031
-0.0274
-0.0539
0.0177
0.0432
0.0074
0.0548
-0.0321
-0.0224
0.0142
-0.2150
-0.1160
0.0486
-0.1141
0.1066
0.0355
0.0140
0.0177
0.0781
0.1331
0.0139
0.0447
0.1063
0.0528
-0.0539
-0.1160
0.1055
-0.1591
0.0100
0.1197
0.0170
0.0929
-0.0675
0.0987
0.1034
0.0501
0.0297
0.0281
-0.0075
-0.0577
-0.0144
-0.1640
0.1255
0.0817
0.0635
0.0936
0.0213
0.0486
-0.1174
0.0237
-0.2177
0.0099
-0.1883
0.0467
-0.0829
0.0585
-0.0306
0.0509
0.0541
-0.1671
0.0115
-0.0302
-0.1393
0.0115
0.0428
0.1189
-0.1289
0.0479
0.0474
-0.0625
0.0009
-0.0144
0.0909
0.1342
-0.0338
0.0560
0.0848
-0.0467
0.0228
-0.0097
0.1360
-0.2625
0.0088
-0.0553
0.0383
-0.0720
0.0907
0.1612
-0.1076
0.1011
-0.0519
0.0838
-0.0704
-0.0806
-0.0243
0.0533
0.1277
0.1403
-0.0593
-0.0639
-0.0766
-0.1163
0.0661
-0.1644
0.0422
-0.2786
-0.1006
-0.0696
-0.0761
0.0371
-0.0247
0.0916
-0.0200
-0.0176
0.0298
-0.0373
0.0466
-0.1371
[torch.FloatTensor of size 128]
), ('layer2.0.downsample.1.running_mean',
-0.2113
0.1359
0.0039
0.0886
-0.0546
-0.2716
0.2521
-0.2035
0.0303
-0.1464
-0.2640
-0.4436
-0.3815
-0.1463
0.0573
-0.2120
-0.0665
0.2438
0.0832
0.0040
-0.2136
-0.1755
-0.7201
-0.2233
0.1047
0.1467
-0.3165
-0.2010
0.2569
-0.8141
-0.0867
-0.0875
-0.9794
-0.2197
-0.0568
-0.3848
0.2579
0.1735
-0.0528
0.3276
-0.4380
0.1895
-0.1316
-0.3101
-0.2862
-0.0167
-0.2216
-0.1930
0.0454
-0.3049
0.1863
-0.5461
0.0461
0.1899
-0.0353
-0.2415
0.0813
0.4788
0.0519
0.0438
0.1379
-0.4036
-0.1231
0.0551
-0.0663
0.1699
-0.3095
-0.1080
-0.1431
0.2339
-0.2893
0.3513
0.1893
-0.0789
-0.5882
-0.1365
-0.2919
0.2869
0.3085
-0.1096
0.3905
-0.2630
-0.2150
-0.1966
-0.2579
-0.0904
0.0506
-0.0275
0.4067
0.0970
-0.3976
0.2176
0.2585
0.1078
-0.2607
-0.1126
-0.2001
-0.4400
-0.1181
0.2168
-0.1290
-0.1434
0.2498
-0.2811
-0.2768
-0.5209
0.1785
0.1161
-0.1806
-0.1448
-0.0704
-0.3591
-0.4581
-0.1117
-0.1916
0.7261
-0.2382
0.0126
0.0749
-0.0097
0.0480
0.9940
0.0634
0.0629
-0.7954
-0.1612
1.3040
-0.2879
[torch.FloatTensor of size 128]
), ('layer2.0.downsample.1.running_var',
0.1951
0.0151
0.0247
0.0691
0.0665
0.0386
0.0292
0.1873
0.0476
0.0859
0.1065
0.0916
0.1233
0.0595
0.1220
0.0878
0.0620
0.0835
0.1198
0.0264
0.1417
0.0151
0.0808
0.0223
0.1227
0.0093
0.1094
0.1057
0.1190
0.1483
0.0764
0.0185
0.0642
0.2118
0.1243
0.0555
0.0427
0.0556
0.1126
0.0959
0.0943
0.1135
0.0661
0.0704
0.1229
0.1406
0.0859
0.0672
0.0138
0.1057
0.1114
0.0589
0.0269
0.0969
0.0489
0.1290
0.0768
0.0935
0.0215
0.1296
0.0122
0.0591
0.0583
0.0216
0.0135
0.0106
0.0342
0.0199
0.0566
0.0168
0.0640
0.0537
0.0322
0.0318
0.0584
0.0361
0.0155
0.0159
0.0949
0.0965
0.0927
0.0331
0.0240
0.1121
0.0693
0.2177
0.0251
0.0650
0.0345
0.0357
0.1534
0.0568
0.0370
0.0442
0.0752
0.0413
0.0251
0.0582
0.0370
0.1190
0.0993
0.2644
0.0537
0.0495
0.1122
0.0638
0.0302
0.0376
0.0187
0.0634
0.0307
0.0378
0.1793
0.0240
0.2015
0.0337
0.1444
0.0368
0.0165
0.0710
0.0133
0.2638
0.0288
0.0057
0.0462
0.0291
0.1198
0.0450
[torch.FloatTensor of size 128]
), ('layer2.1.conv1.weight',
( 0 , 0 ,.,.) =
-9.9023e-04 -7.7429e-03 -7.9740e-03
2.4844e-02 1.8642e-03 5.8352e-03
9.5089e-03 -1.6476e-02 3.9157e-03
( 0 , 1 ,.,.) =
-2.1488e-02 -1.2330e-03 -1.4281e-02
-1.7044e-02 9.5922e-03 7.0445e-03
1.0790e-02 -7.2350e-03 -1.1357e-02
( 0 , 2 ,.,.) =
-1.1126e-03 3.0388e-02 2.2247e-02
-6.1184e-02 -2.3797e-02 2.3747e-03
4.0678e-02 -1.0356e-01 -6.0011e-02
...
( 0 ,125,.,.) =
-8.5833e-03 1.1438e-02 2.0800e-02
-1.6565e-02 -3.9587e-02 1.2594e-02
-1.4314e-03 -5.4257e-03 3.6794e-02
( 0 ,126,.,.) =
-1.3687e-02 -2.9514e-02 -1.4745e-02
2.8299e-02 2.2096e-02 3.4839e-03
-4.3521e-03 -2.6706e-03 1.2258e-04
( 0 ,127,.,.) =
7.6403e-03 2.0666e-02 3.7429e-02
6.9478e-03 4.3983e-02 1.7538e-02
-9.7797e-03 -2.4789e-02 -1.1349e-03
⋮
( 1 , 0 ,.,.) =
8.4439e-02 8.4827e-02 -5.1478e-02
3.5253e-02 -1.1375e-03 -1.0331e-01
-6.4078e-02 -1.2660e-01 -1.2952e-01
( 1 , 1 ,.,.) =
1.0628e-03 -1.4083e-02 4.7109e-03
-2.1059e-02 -2.8778e-02 9.9708e-03
1.4074e-02 1.8691e-02 5.8192e-02
( 1 , 2 ,.,.) =
2.2139e-02 8.9027e-03 1.4790e-02
-1.7497e-02 -5.3924e-03 2.7834e-02
-1.3855e-02 -1.3346e-02 1.7668e-02
...
( 1 ,125,.,.) =
-3.8032e-02 -2.3097e-02 -7.1775e-03
-3.5089e-02 1.0861e-02 1.3640e-02
6.3449e-04 9.7476e-03 7.3670e-03
( 1 ,126,.,.) =
-4.4184e-02 -1.6190e-02 1.2243e-02
-4.0349e-02 -1.7894e-02 2.8911e-02
-6.5176e-03 -1.0490e-02 9.1658e-03
( 1 ,127,.,.) =
4.3621e-03 1.3119e-02 1.8442e-03
1.1555e-02 -1.3031e-02 -9.5657e-03
-2.3314e-02 1.1609e-03 2.6771e-03
⋮
( 2 , 0 ,.,.) =
-2.1180e-02 -6.2213e-03 1.7609e-03
-4.7424e-03 1.1101e-02 1.1296e-02
-1.4529e-02 2.9843e-02 2.4383e-03
( 2 , 1 ,.,.) =
6.9183e-03 9.2937e-03 3.0078e-02
-4.2612e-03 4.9560e-03 -4.7338e-03
3.1360e-02 1.9035e-03 -4.7242e-03
( 2 , 2 ,.,.) =
-3.6726e-02 5.7285e-03 1.3919e-01
-4.2992e-02 9.4023e-04 7.7141e-02
-5.0050e-02 -4.9479e-03 2.4693e-02
...
( 2 ,125,.,.) =
3.7203e-02 7.4712e-03 -4.2659e-02
-8.1729e-03 -9.2536e-02 -5.4934e-03
-2.5927e-02 8.3993e-04 7.4632e-02
( 2 ,126,.,.) =
1.8076e-02 4.5272e-03 -1.3757e-02
-1.8939e-02 -3.2739e-02 -2.9666e-02
-2.0608e-02 -4.6167e-03 1.3080e-03
( 2 ,127,.,.) =
-1.2078e-02 -2.0285e-03 -1.6998e-02
-3.4805e-02 -4.9195e-02 -3.1973e-02
-2.1021e-02 -5.1164e-03 -4.8522e-03
...
⋮
(125, 0 ,.,.) =
3.1791e-02 2.2948e-02 1.0390e-02
-1.2628e-02 -2.9320e-03 4.2645e-03
-2.1707e-02 -1.0856e-02 1.6094e-02
(125, 1 ,.,.) =
-1.4525e-03 -1.0131e-02 -4.6862e-04
2.2130e-02 2.2736e-02 5.0183e-03
-6.0125e-02 -4.3150e-02 -4.4480e-02
(125, 2 ,.,.) =
3.0761e-03 3.4396e-03 6.0877e-03
-1.3683e-02 4.0576e-03 -2.6544e-02
6.8231e-02 6.3474e-02 -9.3660e-03
...
(125,125,.,.) =
1.8752e-02 1.9400e-02 4.1691e-02
8.7770e-03 8.2394e-04 1.8619e-02
1.8796e-02 6.2238e-02 -2.3801e-02
(125,126,.,.) =
-2.9788e-02 -3.4598e-02 -2.5225e-02
8.4234e-03 -2.3222e-02 -9.4612e-03
6.9035e-03 6.9737e-02 -1.3359e-02
(125,127,.,.) =
2.6981e-03 -4.3182e-02 -1.6731e-02
2.5812e-02 -7.2025e-02 -6.5399e-02
4.6257e-02 2.9469e-02 -1.5811e-02
⋮
(126, 0 ,.,.) =
-2.1079e-02 3.8220e-02 8.3305e-03
-5.9912e-03 3.5584e-02 -1.7534e-03
1.8735e-02 7.0859e-03 -3.5151e-03
(126, 1 ,.,.) =
-4.5937e-02 -7.4695e-02 -5.3608e-02
-8.6266e-03 9.0894e-03 -3.0345e-02
-2.8158e-02 -2.1204e-02 -8.4730e-03
(126, 2 ,.,.) =
-7.1772e-02 -6.8582e-02 2.5544e-02
5.0363e-02 2.5269e-02 5.6668e-02
2.6238e-03 1.3871e-03 -8.4692e-03
...
(126,125,.,.) =
-2.9644e-02 1.0896e-02 -3.0402e-02
1.5095e-03 5.0455e-02 1.5597e-02
-2.1015e-02 -1.0757e-02 -3.4942e-02
(126,126,.,.) =
-2.7573e-02 2.9707e-02 -2.9490e-02
2.3301e-03 -3.9011e-02 6.8010e-03
4.4006e-02 3.5397e-02 7.9087e-02
(126,127,.,.) =
-2.7480e-02 5.0337e-02 1.4290e-02
-5.2482e-02 -4.7748e-03 1.2988e-02
-1.8935e-02 -3.0808e-02 -1.7583e-02
⋮
(127, 0 ,.,.) =
3.2280e-02 4.7408e-02 3.4054e-02
2.1445e-02 3.8987e-03 4.6985e-04
1.5159e-02 8.2067e-03 3.2426e-02
(127, 1 ,.,.) =
9.2653e-03 2.3661e-02 4.2089e-02
2.1976e-02 4.6128e-02 1.1402e-02
7.2843e-03 5.2285e-02 8.6340e-03
(127, 2 ,.,.) =
1.4022e-02 1.2800e-02 3.5398e-02
-4.4398e-02 1.7399e-02 -1.5838e-02
3.1712e-02 5.8679e-02 -9.3244e-03
...
(127,125,.,.) =
-4.8399e-03 7.8628e-03 -5.6169e-04
8.0402e-03 1.7392e-02 7.8734e-03
-1.7713e-02 -4.5957e-02 -9.8762e-03
(127,126,.,.) =
-9.7569e-03 -7.5795e-03 -2.4627e-02
-8.2454e-03 6.3065e-02 -3.2954e-03
-7.7549e-03 -1.3404e-04 -8.1337e-03
(127,127,.,.) =
1.7664e-02 1.0114e-02 4.2687e-03
-3.7950e-03 2.6715e-02 2.0121e-02
1.6868e-02 -6.6515e-03 -1.1107e-02
[torch.FloatTensor of size 128x128x3x3]
), ('layer2.1.bn1.weight',
0.3323
0.2908
0.3246
0.3435
0.3011
0.3054
0.3041
0.3539
0.2862
0.3601
0.2970
0.3381
0.2565
0.3276
0.3030
0.4085
0.3519
0.4218
0.3055
0.2551
0.3425
0.3215
0.3366
0.2700
0.2849
0.3954
0.3166
0.3286
0.3515
0.3953
0.2768
0.3625
0.1988
0.2717
0.3355
0.2797
0.2510
0.3832
0.3266
0.3263
0.3681
0.3401
0.3651
0.3391
0.3071
0.3231
0.3691
0.2410
0.3536
0.3189
0.3238
0.3611
0.3086
0.3309
0.3886
0.4362
0.4550
0.2962
0.3071
0.3386
0.3317
0.3228
0.2393
0.3147
0.2738
0.3218
0.3198
0.3411
0.3611
0.2833
0.3035
0.3183
0.3146
0.3890
0.2607
0.3479
0.3236
0.3709
0.2592
0.3742
0.2555
0.2966
0.3505
0.3165
0.2808
0.2660
0.2817
0.4795
0.3372
0.2723
0.2955
0.3225
0.2470
0.3160
0.3515
0.3131
0.3372
0.2837
0.3540
0.2897
0.2490
0.3019
0.3114
0.3510
0.3022
0.3617
0.2859
0.2831
0.3243
0.2769
0.3314
0.2394
0.2932
0.2788
0.2686
0.3194
0.3542
0.2683
0.2955
0.2924
0.3538
0.4256
0.3603
0.3013
0.2763
0.4354
0.3991
0.2694
[torch.FloatTensor of size 128]
), ('layer2.1.bn1.bias',
-0.1735
-0.2337
-0.3383
-0.0806
-0.1920
-0.0621
-0.1885
-0.2830
-0.1680
-0.1796
-0.2645
-0.1983
-0.1183
-0.2432
-0.1706
-0.3090
-0.2661
-0.4040
-0.1949
-0.1392
-0.2449
-0.1242
-0.2012
-0.1901
-0.1014
-0.3468
-0.2245
-0.3272
-0.3057
-0.3289
-0.1532
-0.1967
-0.0667
-0.3281
-0.1418
-0.1527
-0.0987
-0.3243
-0.2252
-0.3462
-0.2284
-0.2263
-0.1810
-0.1564
-0.1730
-0.1507
-0.2913
-0.1643
-0.1998
-0.1532
-0.2211
-0.2247
-0.0913
-0.1563
-0.2453
-0.4854
-0.4428
-0.1021
-0.1615
-0.2125
-0.2239
-0.1952
-0.0447
-0.1733
-0.1178
-0.4775
-0.2110
-0.2305
-0.1795
-0.1582
-0.2008
-0.2041
-0.1974
-0.2750
-0.0395
-0.2161
-0.2786
-0.2626
-0.0997
-0.2953
-0.1431
-0.1448
-0.1894
-0.1283
-0.1807
-0.1144
-0.1308
-0.4154
-0.2324
-0.1376
-0.1154
-0.2099
-0.0966
-0.1669
-0.3835
-0.2545
-0.1603
-0.1904
-0.2420
-0.1658
-0.1133
-0.1498
-0.1213
-0.2318
-0.2017
-0.3827
-0.1491
-0.1174
-0.1261
-0.2031
-0.1832
-0.2274
-0.1281
-0.2557
-0.1400
-0.0723
-0.2212
-0.1486
-0.2914
-0.1116
-0.2194
-0.4898
-0.3693
-0.1437
-0.1232
-0.3723
-0.6794
-0.1536
[torch.FloatTensor of size 128]
), ('layer2.1.bn1.running_mean',
-0.3593
-0.4772
0.2329
-0.7139
-0.6713
-0.5552
-0.4556
-0.6502
-0.2082
-0.4011
-0.3942
-0.2970
-0.1626
-0.4379
-0.3334
-0.6163
-0.2982
-0.5190
0.1676
-0.1832
-0.2080
-0.5296
-0.4245
-0.1755
-0.8556
-0.3067
-0.4560
-0.1642
-0.5059
-0.4529
-0.4532
-0.7254
0.6037
-0.2509
-0.0199
-0.4672
-0.5901
-0.4195
-0.3272
0.5658
-0.3438
-0.5992
-0.2683
-0.4591
-0.3460
-0.1669
-0.3271
0.0351
-0.4175
-0.3984
-0.4118
-0.3619
-0.1313
-0.2758
-0.7196
-0.5401
-1.1739
-0.0497
-0.1358
-0.6139
-0.5143
-0.3017
-0.0465
-0.3977
-0.0251
0.3821
-0.5079
-0.2795
-0.1904
0.3993
-0.4418
-0.1813
-0.6122
-0.3132
-0.0656
-0.4458
0.0894
-0.3759
0.0440
-0.3972
-0.2860
0.0877
-0.0825
-0.7620
-0.0260
-0.3861
-0.1128
-0.4129
-0.2883
1.1054
-0.3892
-0.0393
-0.1394
-0.1678
0.1825
-0.4379
-0.2522
-0.1119
-0.5098
-0.0328
-0.2874
-0.3809
-0.1929
-0.3355
-0.3863
-0.1617
-0.2289
0.1665
-0.6874
-0.1705
-0.5216
-0.3315
-0.6678
0.5342
0.1433
-0.5558
-0.4277
-0.3240
-0.2142
0.0216
-0.4379
-0.8486
-0.7675
-0.4512
0.2788
-0.9694
-1.1691
0.0198
[torch.FloatTensor of size 128]
), ('layer2.1.bn1.running_var',
0.2660
0.1422
0.2404
0.4297
0.1306
0.3336
0.1939
0.1600
0.2166
0.4070
0.1029
0.3442
0.2021
0.1590
0.2226
0.1842
0.2731
0.2262
0.2178
0.1536
0.1722
0.2035
0.3391
0.1572
0.2276
0.2740
0.1543
0.1307
0.1649
0.2571
0.1431
0.2349
0.1765
0.1171
0.3401
0.1657
0.1307
0.3169
0.1973
0.1504
0.3181
0.2529
0.2980
0.2461
0.2857
0.2814
0.1889
0.1125
0.2079
0.2131
0.2158
0.3372
0.2791
0.2852
0.5102
0.1808
0.2540
0.3876
0.2048
0.1716
0.2775
0.2385
0.1992
0.3325
0.1832
0.1246
0.1852
0.2083
0.3179
0.3077
0.1842
0.1845
0.1684
0.2447
0.2990
0.2412
0.3370
0.1974
0.1679
0.2459
0.1670
0.1764
0.2258
0.3743
0.1464
0.1706
0.2925
0.2594
0.2123
0.2191
0.2281
0.1809
0.1278
0.2575
0.3387
0.1755
0.3083
0.1399
0.2197
0.1594
0.1311
0.2250
0.3422
0.2391
0.1240
0.2068
0.2784
0.1800
0.3133
0.1167
0.3066
0.1008
0.1729
0.3045
0.2187
0.2862
0.2361
0.1560
0.1271
0.2467
0.2201
0.1423
0.1531
0.2995
0.2069
0.2126
0.1369
0.1566
[torch.FloatTensor of size 128]
), ('layer2.1.conv2.weight',
( 0 , 0 ,.,.) =
-1.6153e-02 5.0134e-03 -9.0186e-04
-8.8386e-03 -1.9390e-02 -2.4174e-02
6.3052e-03 1.0245e-02 -1.3816e-02
( 0 , 1 ,.,.) =
-1.0979e-02 2.6164e-03 2.3656e-02
-1.7687e-02 1.9861e-02 6.4150e-02
6.0224e-03 7.6342e-02 1.0215e-01
( 0 , 2 ,.,.) =
-8.1113e-03 6.8414e-03 2.5436e-02
-8.0696e-03 9.2929e-03 8.2899e-03
7.7306e-03 1.2159e-02 7.1625e-03
...
( 0 ,125,.,.) =
1.5175e-02 6.2196e-03 2.1798e-02
-1.5199e-02 -8.5439e-02 -2.4713e-02
-1.8460e-02 -4.9767e-02 -1.6818e-03
( 0 ,126,.,.) =
3.0728e-02 3.9962e-02 3.1253e-02
-1.8738e-02 -6.7510e-02 -2.7649e-02
2.8429e-02 3.1854e-02 1.0543e-02
( 0 ,127,.,.) =
-1.8320e-02 -1.5854e-02 -1.0685e-02
-2.7442e-02 -3.0616e-02 -1.0485e-02
-1.5122e-02 -1.0595e-02 -2.5322e-02
⋮
( 1 , 0 ,.,.) =
3.6868e-03 3.0996e-02 4.2763e-02
4.6537e-02 4.8606e-02 2.3800e-03
1.6654e-02 1.2900e-02 -1.8230e-02
( 1 , 1 ,.,.) =
-1.0441e-02 -1.5934e-03 -1.6128e-02
-1.2799e-02 4.9570e-03 -1.4585e-02
-2.3553e-02 -3.7023e-03 -1.4399e-02
( 1 , 2 ,.,.) =
1.0338e-02 -1.7560e-02 -3.3046e-02
-3.2090e-02 -5.9258e-03 2.0201e-03
-4.1428e-02 4.9121e-03 1.6906e-02
...
( 1 ,125,.,.) =
-4.9525e-02 -4.6498e-02 -5.9916e-02
-2.6670e-02 -1.9079e-02 -2.9419e-02
-3.9683e-03 1.9405e-02 7.3317e-03
( 1 ,126,.,.) =
1.4293e-02 1.5643e-02 5.8117e-04
5.1493e-03 7.4332e-03 -3.6928e-03
-1.3522e-02 -8.5536e-03 -2.1259e-03
( 1 ,127,.,.) =
-3.0908e-02 -1.9839e-02 -1.9375e-02
-1.0368e-02 -2.4294e-02 2.4103e-04
-1.9275e-02 -2.9707e-02 -1.5623e-02
⋮
( 2 , 0 ,.,.) =
-4.9212e-02 -2.9588e-02 8.8023e-02
4.7453e-03 4.3564e-02 9.3115e-02
7.4083e-02 4.2868e-02 -5.1033e-02
( 2 , 1 ,.,.) =
6.6992e-03 2.1676e-02 -5.4254e-04
1.9286e-02 1.0920e-02 -4.5440e-03
3.1075e-02 -1.7168e-03 -2.7603e-02
( 2 , 2 ,.,.) =
6.0096e-02 -2.9359e-02 -5.8911e-02
-1.9133e-02 -8.1624e-02 -2.2553e-02
1.1597e-02 2.5092e-02 1.2130e-02
...
( 2 ,125,.,.) =
5.4307e-03 -2.3130e-02 9.6233e-03
-4.3785e-02 -2.6735e-02 2.1993e-02
-3.5919e-02 -4.1009e-02 -2.1860e-02
( 2 ,126,.,.) =
3.3705e-02 6.2938e-02 4.3502e-02
1.1111e-03 1.9243e-02 -1.9707e-03
-1.1493e-02 -5.3445e-02 -9.6676e-03
( 2 ,127,.,.) =
-2.6664e-03 -2.6954e-02 -1.7667e-02
-8.3382e-03 8.9920e-03 8.1260e-04
-2.6832e-02 -3.5991e-02 -4.2495e-02
...
⋮
(125, 0 ,.,.) =
-1.8876e-03 -2.2728e-02 -4.2991e-03
-9.2231e-03 -3.4333e-02 -1.3392e-02
-1.2774e-02 -1.1435e-02 1.5617e-02
(125, 1 ,.,.) =
1.0703e-02 1.2792e-02 2.2662e-02
7.3185e-03 -1.7847e-02 1.0674e-02
-1.5936e-02 -1.9318e-02 2.1768e-02
(125, 2 ,.,.) =
-7.3009e-03 3.0234e-02 -1.1899e-02
-2.6099e-02 3.7452e-03 3.2776e-02
-3.3101e-02 -7.1923e-03 1.6559e-02
...
(125,125,.,.) =
-3.2818e-02 -1.0021e-01 -4.7012e-02
2.8293e-03 4.1410e-02 -1.1391e-02
-1.1152e-02 -5.5861e-03 1.9968e-02
(125,126,.,.) =
-2.3932e-02 -3.0687e-02 -1.1756e-03
1.5311e-03 -3.5002e-02 -2.4414e-02
-8.7575e-03 -7.7842e-02 -3.8842e-02
(125,127,.,.) =
2.6107e-02 1.5406e-02 1.7569e-02
-1.5130e-02 -4.8687e-03 3.0773e-03
-1.3470e-02 -9.3201e-03 -4.8982e-03
⋮
(126, 0 ,.,.) =
-2.0228e-02 -3.0006e-02 -9.8419e-03
-3.8676e-02 -3.3481e-02 -7.4265e-03
-2.8935e-02 -3.2037e-02 2.9245e-03
(126, 1 ,.,.) =
-1.2900e-02 3.8046e-03 1.5940e-02
-2.4030e-02 2.0666e-03 5.7250e-03
6.9989e-03 1.2192e-02 1.5406e-02
(126, 2 ,.,.) =
-1.5018e-02 -9.0988e-03 2.4450e-02
1.0039e-02 1.2561e-02 2.6997e-02
2.9556e-02 1.9463e-02 -2.6584e-03
...
(126,125,.,.) =
-1.8481e-02 3.9417e-04 9.9768e-03
-4.5447e-03 1.2307e-02 3.5507e-02
-1.1873e-03 -2.6185e-03 1.1547e-02
(126,126,.,.) =
4.6292e-03 -1.3690e-02 -1.0171e-02
1.2104e-02 1.6793e-02 1.3003e-02
1.3328e-03 3.4701e-03 1.7323e-02
(126,127,.,.) =
-8.7332e-05 5.8646e-03 -3.5117e-03
3.8112e-03 -7.1828e-03 -1.1407e-02
1.9705e-02 2.0556e-02 5.7084e-03
⋮
(127, 0 ,.,.) =
3.6998e-02 3.2616e-02 -9.4535e-04
-2.9484e-02 -2.3441e-02 -2.8085e-02
-2.5451e-02 3.9048e-02 3.6686e-02
(127, 1 ,.,.) =
-1.8732e-02 -1.5352e-02 1.1149e-02
-2.1324e-03 -2.3177e-02 1.7628e-02
-4.0012e-03 1.5463e-02 9.2496e-03
(127, 2 ,.,.) =
-2.9346e-02 7.7071e-03 -5.6520e-03
-2.3611e-02 -1.9390e-03 2.0221e-02
8.0955e-03 -2.3268e-02 -2.8827e-02
...
(127,125,.,.) =
-3.3532e-02 -2.9092e-02 -4.0045e-02
2.6530e-03 -2.0568e-02 1.3075e-02
1.6061e-02 -5.5725e-02 -4.9167e-02
(127,126,.,.) =
-7.9132e-03 2.1466e-02 2.0913e-02
-1.7259e-02 -2.5851e-02 2.7177e-03
-4.6532e-02 -2.4846e-02 -1.9911e-02
(127,127,.,.) =
-5.0350e-02 -2.5574e-02 1.7763e-02
-3.4474e-02 5.5247e-03 -2.7754e-02
-2.0743e-02 -2.2332e-02 -4.3512e-02
[torch.FloatTensor of size 128x128x3x3]
), ('layer2.1.bn2.weight',
0.1194
0.1625
0.3084
0.2931
0.2957
0.5263
0.4038
0.2024
0.3401
0.1982
0.2559
0.2311
0.1630
0.2891
0.2248
0.2311
0.2417
0.2187
0.1922
0.3103
0.2015
0.4802
0.2481
0.3898
0.3204
0.4035
0.2617
0.1551
0.2256
0.2117
0.2708
0.3537
0.2505
0.1843
0.2465
0.6501
0.3898
0.4289
0.1799
0.1604
0.1775
0.3600
0.2694
0.1283
0.1662
0.1716
0.1837
0.1710
0.4178
0.3249
0.1759
0.4717
0.4115
0.1995
0.2025
0.1492
0.2860
0.1072
0.3649
0.1906
0.5369
0.2400
0.4411
0.1702
0.1993
0.2045
0.1972
0.4041
0.3034
0.6168
0.2284
0.3228
0.4547
0.4370
0.1570
0.4057
0.5791
0.2338
0.1586
0.3130
0.2201
0.3195
0.1166
0.2517
0.2184
0.0989
0.3116
0.2613
0.3277
0.1778
0.2718
0.4174
0.5140
0.2136
0.1905
0.2898
0.2472
0.1341
0.6212
0.1810
0.2394
0.1417
0.1759
0.2827
0.1987
0.3775
0.3749
0.1274
0.3656
0.4305
0.4212
0.2673
0.2016
0.5098
0.1449
0.4408
0.3583
0.2503
0.5682
0.2518
0.1392
0.0617
0.3406
0.1313
0.4586
0.2914
0.1326
0.3915
[torch.FloatTensor of size 128]
), ('layer2.1.bn2.bias',
-0.1403
-0.0889
-0.4147
-0.2264
-0.0737
-0.3534
-0.3379
-0.0752
-0.1791
0.0448
-0.2842
-0.1765
-0.1591
-0.0675
-0.1543
-0.1061
-0.2334
-0.0981
-0.0908
-0.0567
-0.1908
-0.2055
-0.2704
-0.1883
-0.3570
-0.1125
-0.1632
-0.0211
-0.1687
-0.2124
-0.1713
-0.0872
-0.2194
-0.1888
-0.2954
-0.4570
-0.0226
-0.0527
0.0406
-0.0609
-0.0456
-0.1176
-0.0145
0.0318
-0.2046
-0.0953
-0.0496
-0.1051
-0.0793
-0.1933
-0.1467
-0.3215
-0.3257
-0.2287
-0.0356
-0.1869
-0.1932
-0.0771
0.2768
-0.0656
-0.0895
-0.2548
-0.2365
0.0021
-0.0987
-0.3178
0.1613
0.0006
-0.2347
-0.4150
-0.1310
-0.3142
-0.2582
-0.5400
0.0772
-0.2546
-0.4454
-0.0262
-0.0937
-0.2201
-0.2044
-0.0155
-0.0893
-0.2167
0.1112
-0.0619
-0.1217
-0.1593
-0.1317
-0.1717
-0.3729
-0.3354
-0.3414
0.0358
-0.2067
-0.1087
0.0141
-0.0338
-0.2129
-0.1122
-0.1627
-0.2000
0.0908
-0.0041
-0.1313
-0.2942
0.0160
-0.1065
-0.1289
-0.1699
-0.1721
-0.1809
-0.2295
-0.3611
-0.1746
-0.3540
-0.1554
-0.2709
-0.2607
0.0084
-0.0311
-0.0022
-0.0831
0.0380
-0.4893
-0.2749
0.1245
-0.1272
[torch.FloatTensor of size 128]
), ('layer2.1.bn2.running_mean',
-0.0303
0.0327
0.0240
-0.0763
-0.1589
-0.0804
-0.1797
-0.0701
-0.1573
0.1134
-0.0805
-0.0234
-0.0756
-0.1833
0.0384
0.0791
-0.0594
-0.0217
0.0288
-0.1023
-0.0698
-0.0484
0.1234
-0.1242
0.0584
-0.1045
-0.0082
-0.0536
0.0127
0.0269
-0.1785
-0.0514
-0.0503
0.0173
0.0162
-0.2532
-0.2817
-0.2388
-0.0641
0.0136
0.1397
-0.2827
0.0767
-0.0328
-0.0080
-0.0058
-0.1322
-0.0266
-0.3995
-0.0825
-0.1061
-0.0556
-0.0557
0.0552
-0.1259
-0.0077
-0.1017
-0.0532
-0.1570
0.0675
-0.5579
0.0523
-0.1109
0.0096
0.0103
-0.0968
-0.0100
-0.2631
-0.1013
-0.0156
-0.0544
-0.1436
-0.0747
-0.0609
-0.0710
-0.1171
0.0205
0.0372
-0.0162
-0.0245
0.1684
-0.2868
0.0558
0.0402
-0.1360
-0.0523
-0.0547
-0.1108
-0.2490
-0.0252
0.0783
-0.1927
-0.1427
-0.1122
-0.0599
-0.0884
-0.0191
0.0015
-0.5522
0.0042
0.0305
0.0242
-0.1246
-0.1543
0.0045
-0.1808
-0.2224
0.0909
0.0329
0.5456
-0.0230
0.0628
0.0464
-0.0874
-0.0300
0.1108
-0.0492
-0.0331
-0.2471
-0.0352
0.0516
0.0709
-0.2409
-0.0650
-0.1684
-0.0565
-0.1306
-0.0627
[torch.FloatTensor of size 128]
), ('layer2.1.bn2.running_var',
1.00000e-02 *
1.8228
2.1225
2.8612
2.0886
2.9474
4.7744
3.5290
2.8429
2.8470
4.5069
2.9840
1.9491
1.7088
2.9840
2.8539
2.8518
2.0139
2.2774
3.2135
3.3348
1.7759
4.7420
3.0149
4.1645
3.7812
4.6252
2.9589
1.6504
2.6924
3.0834
3.6263
4.5937
3.1620
2.6538
2.2904
5.8237
5.6463
4.0456
2.6616
3.2348
3.3927
8.4368
2.4933
1.7348
2.2366
2.1032
1.9272
1.5102
5.6145
3.9999
1.8257
4.0509
3.2031
2.2098
3.3824
2.2704
2.5319
1.8465
6.8178
2.6885
7.2726
2.1805
4.9063
2.6663
2.3015
1.3440
4.7817
5.8346
3.3150
4.7472
1.8629
3.3559
4.5253
3.1564
3.6324
3.4589
4.7584
3.2355
1.7391
3.5121
1.8529
5.3177
1.3671
3.0469
3.7829
1.6996
4.1624
3.1600
3.2903
1.6922
3.2056
4.3576
3.5142
2.4761
1.6919
3.8553
3.6356
1.7814
6.2490
4.0622
2.5852
2.2963
2.7265
2.9650
2.0724
4.4788
5.7808
2.0073
3.9706
5.8224
4.3781
3.6008
2.6018
3.5214
1.9792
3.2273
4.9339
1.6944
6.2593
2.9896
2.5511
1.5677
3.6686
1.5467
3.1936
2.8402
2.8767
4.4939
[torch.FloatTensor of size 128]
), ('layer3.0.conv1.weight',
( 0 , 0 ,.,.) =
-1.5906e-02 -1.6618e-02 -1.5938e-02
-5.2744e-03 1.5103e-02 9.8805e-03
-1.4850e-02 3.6254e-04 -1.1378e-02
( 0 , 1 ,.,.) =
-9.4971e-03 -1.8568e-02 -6.0605e-03
9.7622e-03 -1.2294e-02 -5.2978e-03
7.0518e-03 -1.6063e-02 -7.1445e-03
( 0 , 2 ,.,.) =
-2.2693e-02 -3.7669e-02 -3.3695e-02
-3.1569e-02 -5.8022e-02 -3.9105e-02
-3.4616e-02 -3.8806e-02 -1.5695e-02
...
( 0 ,125,.,.) =
4.8713e-03 7.9539e-03 1.4374e-02
-1.5242e-03 2.4200e-02 5.6440e-03
-4.4355e-03 6.2454e-03 6.8561e-03
( 0 ,126,.,.) =
1.6028e-02 -1.2036e-02 -1.3101e-03
9.5804e-03 5.7272e-03 1.6091e-03
-9.9173e-03 -1.3593e-02 -6.3679e-03
( 0 ,127,.,.) =
5.3450e-02 4.6441e-02 2.4824e-02
3.4065e-02 -2.8656e-03 -4.1207e-03
-1.4000e-02 -4.6092e-03 -1.4152e-02
⋮
( 1 , 0 ,.,.) =
-1.1567e-03 -1.8638e-02 -3.4453e-02
4.9889e-03 -1.1695e-02 -3.3321e-02
5.9653e-03 -1.6154e-02 -1.7452e-02
( 1 , 1 ,.,.) =
1.0729e-02 1.3964e-02 -1.9171e-02
2.8854e-03 1.2573e-02 7.2767e-03
-1.6815e-02 -1.8740e-02 -1.3784e-03
( 1 , 2 ,.,.) =
-2.1852e-02 6.2900e-03 1.5931e-02
-3.5272e-03 5.6997e-03 3.1077e-02
2.3169e-03 3.2389e-03 1.7490e-02
...
( 1 ,125,.,.) =
-1.6246e-02 -7.7688e-03 7.7471e-03
-1.4870e-03 -1.2226e-02 -9.3389e-03
8.6164e-04 -2.2071e-03 7.3769e-03
( 1 ,126,.,.) =
2.9310e-03 -2.3592e-02 5.8461e-03
1.4344e-02 -1.6924e-02 -6.1749e-03
-7.7191e-03 -3.2305e-02 -3.3688e-02
( 1 ,127,.,.) =
8.6900e-03 1.3976e-02 8.0760e-03
-3.3662e-03 1.0516e-02 1.4952e-02
1.8944e-02 3.0948e-02 2.5647e-02
⋮
( 2 , 0 ,.,.) =
-3.5797e-02 -2.2565e-02 -1.4440e-02
-7.5372e-03 -2.2142e-02 1.1150e-02
-3.6385e-03 -1.4821e-02 -1.6427e-02
( 2 , 1 ,.,.) =
-1.4620e-02 -3.0657e-02 -2.0434e-02
-2.8462e-02 -4.5328e-02 -5.7915e-02
2.8774e-02 -1.5172e-02 -2.4541e-02
( 2 , 2 ,.,.) =
1.7403e-02 1.9920e-02 -4.6249e-03
1.7813e-02 2.3648e-02 1.3638e-02
2.9347e-02 4.3449e-02 1.8594e-02
...
( 2 ,125,.,.) =
7.9258e-03 -1.2183e-02 -1.5811e-02
-1.0720e-02 -3.1404e-02 -7.5279e-03
-7.0299e-03 -1.7342e-02 -3.0783e-02
( 2 ,126,.,.) =
-1.0258e-02 -1.1796e-02 -1.7141e-02
-2.6423e-02 -1.5036e-03 2.7959e-02
-8.9306e-03 5.3510e-03 9.6632e-03
( 2 ,127,.,.) =
1.4481e-02 -3.1531e-02 -1.9707e-02
-1.4944e-02 -1.7709e-02 7.6966e-03
1.2465e-02 7.1035e-03 -6.1596e-03
...
⋮
(253, 0 ,.,.) =
5.3120e-03 2.5512e-02 7.1053e-03
1.9666e-02 2.6990e-02 4.2043e-02
4.1191e-02 2.2283e-02 3.5003e-02
(253, 1 ,.,.) =
2.5968e-03 4.0685e-03 1.0626e-02
4.6474e-03 2.0337e-02 8.0847e-03
1.4475e-02 -3.0070e-03 -1.9656e-02
(253, 2 ,.,.) =
-4.0235e-03 2.5510e-02 2.2875e-03
-1.5182e-02 2.6031e-02 8.2526e-03
-2.1065e-03 2.6928e-02 3.2296e-03
...
(253,125,.,.) =
5.5063e-03 -4.8631e-03 1.8346e-02
8.5499e-03 2.3002e-03 7.7201e-03
8.2280e-03 9.5818e-03 2.1510e-02
(253,126,.,.) =
-1.7702e-02 9.9203e-03 -1.2934e-02
-1.2670e-02 9.5506e-03 -1.2438e-02
8.9810e-03 4.9343e-02 3.6238e-02
(253,127,.,.) =
1.2333e-02 1.8408e-02 -1.7794e-02
5.7676e-03 -5.7844e-03 -1.1706e-02
3.4462e-03 -1.0299e-02 -4.2529e-02
⋮
(254, 0 ,.,.) =
3.1634e-02 7.6514e-02 4.4300e-02
9.3963e-02 1.4798e-01 1.5104e-01
6.6483e-02 1.3856e-01 1.1323e-01
(254, 1 ,.,.) =
-2.8205e-02 -4.0731e-03 -1.9967e-02
-1.9283e-02 -1.2330e-03 1.0728e-02
-1.6487e-02 -2.7540e-03 7.7751e-04
(254, 2 ,.,.) =
-1.2156e-02 -3.2183e-02 -1.5299e-02
-9.1752e-04 -1.2350e-02 -3.8531e-03
-1.9342e-02 -1.0735e-02 -2.1051e-02
...
(254,125,.,.) =
-3.0457e-03 6.5687e-03 -3.2163e-04
1.4628e-02 -1.6662e-02 1.4216e-02
2.2738e-02 1.2016e-02 7.1802e-03
(254,126,.,.) =
3.9151e-03 -1.9739e-02 1.1058e-02
-2.5105e-02 -3.8439e-02 -4.4722e-02
-3.5862e-02 -9.8120e-02 -6.8447e-02
(254,127,.,.) =
-8.4853e-03 2.2905e-03 3.0757e-03
3.8484e-03 1.8156e-02 6.9025e-03
8.9456e-03 8.0009e-03 1.2579e-02
⋮
(255, 0 ,.,.) =
-1.3006e-02 -9.0262e-03 1.0574e-03
-2.5979e-02 -1.9484e-02 -9.3637e-03
4.8438e-03 2.3742e-03 1.0574e-02
(255, 1 ,.,.) =
-2.4782e-03 -1.4049e-02 -2.8621e-02
-2.3822e-03 1.1463e-03 -2.3321e-02
1.2275e-02 8.3306e-04 1.4305e-03
(255, 2 ,.,.) =
-4.8958e-02 -4.3860e-02 -5.7901e-02
-3.5920e-02 -3.6503e-02 -3.8574e-02
-4.1023e-02 -3.3337e-02 -1.3673e-02
...
(255,125,.,.) =
-1.1772e-02 -8.1042e-03 -1.5803e-02
-2.7190e-02 -2.8550e-02 7.5042e-03
-2.4363e-02 1.3943e-02 6.0615e-03
(255,126,.,.) =
-2.7317e-02 1.9704e-02 2.2183e-02
-3.7557e-02 2.0815e-02 1.8682e-02
-4.4557e-02 -4.3529e-03 -1.6779e-02
(255,127,.,.) =
1.9939e-02 2.6802e-02 1.1996e-02
2.0260e-02 2.1540e-02 2.5003e-03
1.8079e-04 -7.6315e-03 -1.9582e-02
[torch.FloatTensor of size 256x128x3x3]
), ('layer3.0.bn1.weight',
0.2856
0.2425
0.3032
0.3168
0.3011
0.3475
0.3076
0.3105
0.3646
0.3255
0.2195
0.3167
0.2674
0.3104
0.3026
0.3443
0.2915
0.3379
0.2887
0.2996
0.3588
0.3164
0.2882
0.2917
0.3492
0.3749
0.3587
0.3166
0.2756
0.2978
0.3364
0.2893
0.3106
0.2506
0.3460
0.3621
0.2570
0.3695
0.2935
0.3286
0.3243
0.3188
0.3093
0.3314
0.3550
0.2978
0.2737
0.3023
0.3179
0.2831
0.3065
0.3390
0.3053
0.3099
0.3017
0.3472
0.3034
0.2935
0.3352
0.3676
0.3163
0.3404
0.3078
0.2819
0.3794
0.3083
0.2778
0.3363
0.2284
0.3259
0.2790
0.3072
0.2975
0.3847
0.3372
0.2253
0.2827
0.3737
0.2796
0.3485
0.3879
0.3288
0.3340
0.3335
0.2756
0.3500
0.2897
0.2798
0.2907
0.3220
0.3824
0.3522
0.3278
0.3689
0.3147
0.3600
0.3123
0.2519
0.2355
0.3211
0.3203
0.3345
0.2768
0.3341
0.3153
0.3175
0.2224
0.2956
0.3206
0.2658
0.3662
0.2715
0.3655
0.3427
0.2820
0.2754
0.4669
0.3090
0.3468
0.3144
0.3220
0.2765
0.3301
0.3219
0.3152
0.2813
0.2497
0.3514
0.3264
0.3014
0.2734
0.3522
0.3831
0.3028
0.2940
0.2825
0.3099
0.2373
0.2705
0.4189
0.2985
0.3841
0.2754
0.3091
0.3169
0.2824
0.2749
0.3493
0.4018
0.3108
0.2176
0.2821
0.3199
0.3358
0.2468
0.3332
0.2876
0.2964
0.2385
0.3451
0.3081
0.2760
0.2533
0.2576
0.3092
0.2950
0.3089
0.3113
0.3475
0.3172
0.2474
0.3371
0.3450
0.3189
0.3150
0.3008
0.2694
0.3730
0.3235
0.2988
0.2812
0.3245
0.3630
0.2843
0.3533
0.3451
0.3244
0.3524
0.3118
0.3429
0.3215
0.2748
0.3287
0.3656
0.2901
0.2523
0.3284
0.2523
0.3426
0.2851
0.2918
0.2497
0.5159
0.3026
0.2743
0.2379
0.3524
0.3394
0.2264
0.2652
0.3759
0.3777
0.2459
0.3046
0.3067
0.3775
0.2976
0.3552
0.2696
0.2649
0.2872
0.2985
0.2867
0.3676
0.3494
0.3823
0.3246
0.3567
0.2662
0.3357
0.2935
0.2987
0.2664
0.3019
0.3175
0.2436
0.3274
0.2764
0.2466
0.2876
0.3060
0.3157
0.3329
0.2984
0.2961
0.3309
0.3729
0.3238
0.3491
0.3342
0.3037
0.3578
0.2849
0.2827
0.2809
0.3249
[torch.FloatTensor of size 256]
), ('layer3.0.bn1.bias',
-0.0915
0.0189
-0.1235
-0.0613
-0.1003
-0.1306
-0.1473
-0.1079
-0.2438
-0.1113
0.1361
-0.1477
0.0387
-0.0907
0.0352
-0.1851
-0.1319
-0.1746
-0.0815
-0.1004
-0.3394
-0.1712
-0.0807
-0.1228
-0.2263
-0.1503
-0.2314
-0.2327
-0.0854
-0.0802
-0.0716
-0.0839
-0.0592
0.0358
-0.0322
-0.2197
0.0027
-0.1471
-0.0264
-0.1886
-0.2417
-0.1494
-0.1904
-0.1089
-0.2657
-0.1362
-0.0487
-0.1340
-0.0930
-0.0064
-0.1721
-0.1476
-0.1714
0.0336
-0.1011
-0.1761
-0.1184
-0.0482
-0.3260
-0.1555
-0.0169
-0.2373
-0.1015
-0.1051
-0.2738
-0.1917
-0.0503
-0.1098
0.1484
-0.2282
-0.0700
-0.1427
-0.1417
-0.3096
-0.2043
0.0269
-0.0779
-0.0842
-0.0464
-0.1429
-0.3917
0.0257
-0.1779
-0.0993
-0.0507
-0.2222
-0.0951
-0.0861
-0.0743
-0.1666
-0.2054
-0.1782
-0.1150
-0.2525
-0.0694
-0.0536
-0.0499
-0.0311
0.1212
-0.0988
-0.1570
-0.3093
-0.0797
-0.0994
-0.1774
-0.0505
0.0766
-0.0480
-0.1278
-0.0651
-0.1737
0.0303
-0.1334
-0.2435
-0.0746
-0.0365
-0.1843
-0.0887
-0.1924
-0.1110
-0.1458
-0.0895
-0.0956
-0.2042
-0.1338
-0.0637
-0.0699
-0.1656
-0.1521
-0.1317
-0.0826
-0.2470
-0.1174
-0.1475
-0.0840
-0.0681
-0.1789
0.0288
-0.0362
-0.3005
-0.1441
-0.0812
-0.0492
-0.0657
-0.1249
-0.1104
0.0187
-0.1351
-0.1944
-0.0909
0.2067
-0.1081
-0.2499
-0.0999
0.0507
-0.1899
-0.0369
-0.1432
0.1279
-0.1782
-0.1172
-0.0099
0.0785
-0.0681
-0.0365
-0.1596
-0.1606
-0.0922
-0.1773
-0.1788
0.0306
-0.1101
-0.1355
-0.2244
-0.0860
-0.1232
-0.0927
-0.1666
-0.1393
-0.0898
-0.0614
-0.1740
-0.2503
-0.0593
-0.1272
-0.1422
-0.0743
-0.2208
-0.2207
-0.2742
-0.1302
-0.0916
-0.1696
-0.2481
-0.1524
0.0410
-0.1077
0.0408
-0.1915
-0.0697
-0.1049
-0.0110
-0.3257
-0.1336
-0.1021
0.0128
-0.2717
-0.1245
0.0288
-0.1025
-0.2405
-0.1476
0.1008
-0.0220
-0.0983
-0.4417
-0.0774
-0.3207
-0.0272
-0.0726
-0.0608
-0.0430
-0.0872
-0.1280
-0.1608
-0.1529
-0.1745
-0.1702
-0.0486
-0.1459
-0.0552
-0.0808
-0.0264
-0.0952
-0.1126
-0.0452
-0.0837
-0.0331
0.0127
-0.0865
-0.1446
-0.0732
-0.2160
-0.0952
-0.1297
-0.2008
-0.2135
-0.2204
-0.2381
-0.1787
-0.1386
-0.1901
-0.0981
-0.0850
-0.0761
-0.0586
[torch.FloatTensor of size 256]
), ('layer3.0.bn1.running_mean',
-0.1253
-0.2262
-0.4860
-0.1458
-0.6311
0.0073
-0.0597
0.0038
-0.1363
-0.2213
-0.3844
-0.5783
-1.2715
-0.4546
-1.4092
-0.4864
0.2884
-0.7827
0.3060
-0.3542
-0.5711
-0.7998
0.0888
-0.1439
-0.6867
-0.8588
-0.5447
0.2983
-0.1919
0.1344
-0.6387
-0.4716
0.6139
-0.0065
0.0092
-0.7543
-0.3666
-0.1479
-0.7263
-0.3064
-0.3003
-0.4880
-0.3688
-0.3295
-0.1466
-0.6681
-0.1217
-0.5661
-0.7542
-0.4977
-0.1982
-0.7480
0.2935
-0.5039
-0.4152
-0.1846
-0.0653
-0.3617
0.0979
-0.0989
-0.8747
-0.6866
-0.2850
-0.1807
-0.7564
0.4896
-0.4719
-0.3251
0.2361
-0.2823
-0.5454
-0.5703
-0.3914
-0.7459
-0.3127
0.4983
-0.4290
0.0501
-0.1465
-0.6060
0.3132
-0.3743
-0.5826
-0.3843
-0.1076
-0.5657
-0.3102
0.3179
-0.7787
-0.0326
-0.4723
-0.5669
-0.0142
-0.5974
-0.3175
-0.9361
-0.1838
0.1329
-1.0321
-0.0591
-0.4599
-0.5094
0.2070
-0.0520
0.1508
-0.8619
-0.0878
-0.8132
-0.3859
-0.2299
-0.6100
-0.2246
-0.3464
-0.9515
0.0855
0.3101
-0.4721
-0.4155
0.0080
-0.1732
-0.6501
-0.6203
-0.1372
-0.1522
-0.2870
-0.4941
0.0966
0.5073
-0.2510
-0.3032
-0.3150
-0.5733
-0.0545
-0.3441
-0.7644
-0.2321
-0.7738
-0.1745
0.2423
-0.3351
-0.1296
-0.5125
-0.1101
-0.8768
-0.2860
-0.3560
-0.1244
-0.2997
-0.1577
-0.3160
-0.1748
0.5893
0.1252
-0.2802
-0.0514
-0.6605
-0.1989
-0.1062
-0.0844
-0.6724
-0.0008
-0.2606
-0.3828
-0.1674
-1.4552
-0.4452
-0.2158
-0.5878
-0.4179
-0.6215
0.1737
-0.5887
-0.5720
0.0747
-0.6005
-0.3461
-0.3260
-0.3577
-0.0933
-0.3588
-0.3935
-0.9551
-0.9143
-0.2762
-0.3652
-0.1704
-0.2676
-0.2292
-0.3800
-0.4927
-0.2178
-0.3614
-0.1274
-0.5203
-0.5437
0.0210
-0.6357
-0.5927
-0.1611
-0.1015
-0.4067
-0.4212
-0.2671
-0.3272
-0.4998
0.0105
-0.3977
-0.4612
-0.0671
0.1528
-0.1927
-0.4018
-0.5817
-0.3383
-0.5079
-0.6062
-0.2094
0.0344
0.0049
-0.0074
-0.8431
-0.8824
-0.3549
-0.2095
-0.4937
-0.2907
-0.4414
-0.4896
0.0836
-0.9780
-0.4721
-0.1474
-0.3185
-0.2436
-0.1797
-0.0429
-0.2972
-0.4299
-0.3125
-0.3699
-0.4899
-0.0979
-0.7804
-0.3924
0.0850
-0.5030
-0.6755
-0.2506
-0.4354
-0.2441
0.0193
-0.3442
-0.6758
-0.4484
-0.1628
-0.6801
[torch.FloatTensor of size 256]
), ('layer3.0.bn1.running_var',
0.2509
0.2989
0.2810
0.2616
0.3038
0.3614
0.1749
0.2615
0.2315
0.2593
0.3199
0.2039
0.3937
0.2819
0.6928
0.1669
0.1971
0.2347
0.1798
0.2584
0.2045
0.2247
0.2575
0.1896
0.2243
0.3290
0.2262
0.1629
0.1750
0.2162
0.2686
0.1990
0.3028
0.2474
0.6300
0.2747
0.2340
0.2184
0.3476
0.1966
0.1739
0.2011
0.1882
0.1917
0.2349
0.1796
0.2018
0.1950
0.2186
0.2595
0.1522
0.2088
0.1380
0.5258
0.1659
0.3283
0.1931
0.2347
0.1449
0.2613
0.2720
0.1855
0.2469
0.2337
0.2525
0.1487
0.1740
0.2101
0.3507
0.1668
0.2851
0.1874
0.1725
0.2619
0.1903
0.2774
0.1875
0.2584
0.1635
0.2693
0.1709
0.7093
0.2264
0.2439
0.2717
0.2020
0.2420
0.1979
0.3249
0.2325
0.2174
0.2400
0.2201
0.1914
0.2311
0.4723
0.2749
0.2033
0.4373
0.2124
0.1956
0.1570
0.2497
0.2723
0.1928
0.2726
0.1942
0.2862
0.2731
0.2348
0.3259
0.3079
0.2799
0.1865
0.2416
0.2262
0.3502
0.2169
0.2371
0.1750
0.2822
0.1983
0.3979
0.2380
0.1798
0.2661
0.1640
0.4260
0.2032
0.1764
0.1802
0.2821
0.4783
0.1895
0.3361
0.2009
0.1541
0.2021
0.2365
0.3530
0.1833
0.6131
0.1840
0.2772
0.2735
0.1799
0.4005
0.2144
0.2677
0.2665
0.4213
0.2373
0.2408
0.2575
0.3893
0.1723
0.3173
0.2014
0.5098
0.2254
0.2103
0.3155
0.3065
0.1814
0.2512
0.1665
0.2078
0.2352
0.2161
0.1674
0.4302
0.3045
0.3518
0.1620
0.2234
0.2028
0.1523
0.3315
0.2086
0.3005
0.2760
0.1988
0.1683
0.2111
0.3077
0.2803
0.3045
0.1773
0.1797
0.1470
0.2122
0.2147
0.1688
0.1913
0.2067
0.2444
0.2609
0.2750
0.2597
0.2373
0.2216
0.3981
0.7746
0.2015
0.1734
0.3637
0.1748
0.2495
0.2457
0.1559
0.2741
0.3765
0.2767
0.2841
0.2553
0.1582
0.3328
0.1996
0.2284
0.2720
0.2520
0.2724
0.1931
0.2924
0.2629
0.3760
0.2206
0.2616
0.1907
0.2821
0.2752
0.2303
0.2730
0.2340
0.2235
0.1466
0.2869
0.2763
0.2823
0.1843
0.1804
0.2244
0.1840
0.1446
0.2126
0.1792
0.2546
0.1661
0.1881
0.1667
0.2371
0.2523
0.2260
0.2728
0.2028
0.4802
[torch.FloatTensor of size 256]
), ('layer3.0.conv2.weight',
( 0 , 0 ,.,.) =
-9.2775e-03 -3.3897e-02 -1.1927e-02
-2.4595e-02 -7.9761e-02 -4.8709e-02
-4.3490e-02 -8.0118e-02 -6.5252e-02
( 0 , 1 ,.,.) =
-2.8918e-02 2.3763e-04 -2.8561e-02
9.8557e-03 1.0253e-02 -1.7677e-02
-1.0684e-02 2.8071e-03 -1.2483e-02
( 0 , 2 ,.,.) =
-1.4730e-02 2.2622e-02 4.4314e-03
1.5512e-02 1.0901e-02 -4.0294e-03
-2.0756e-02 -1.8048e-02 -1.7258e-02
...
( 0 ,253,.,.) =
3.1821e-04 -4.0924e-03 -7.9885e-04
-2.1664e-02 -2.2339e-02 -2.9870e-02
1.0453e-02 3.4707e-03 -1.1426e-02
( 0 ,254,.,.) =
9.6516e-03 1.8361e-02 3.7035e-02
3.7147e-03 1.0427e-02 1.5162e-02
8.4325e-03 1.8343e-02 3.0159e-02
( 0 ,255,.,.) =
1.3859e-03 8.4181e-03 9.7185e-03
2.6455e-02 4.1474e-02 5.5292e-02
1.6905e-02 6.1027e-02 5.6296e-02
⋮
( 1 , 0 ,.,.) =
1.1743e-02 1.6508e-02 5.1232e-03
2.9441e-02 2.0441e-02 2.1624e-02
7.8852e-03 1.3290e-02 1.1664e-02
( 1 , 1 ,.,.) =
-1.5315e-02 -2.1319e-02 -8.9703e-03
-2.9171e-02 -5.1600e-02 -4.3605e-02
-4.5486e-03 -3.7239e-02 -4.2013e-02
( 1 , 2 ,.,.) =
3.0217e-04 3.9781e-02 -1.4889e-04
1.2860e-02 3.3156e-02 1.6254e-02
-9.5886e-03 -5.6529e-03 -1.6966e-02
...
( 1 ,253,.,.) =
4.4662e-02 8.1982e-03 1.6867e-02
-6.6190e-03 -3.7080e-02 -5.9346e-03
-2.3913e-02 -6.0699e-02 -2.8947e-02
( 1 ,254,.,.) =
-5.7020e-03 -4.2262e-02 -2.1947e-02
-2.2780e-02 -3.1428e-02 -5.8322e-02
-1.9598e-02 -5.2995e-02 -4.8502e-02
( 1 ,255,.,.) =
6.4948e-03 3.2666e-03 9.3442e-03
1.0466e-03 -4.9306e-03 -1.1003e-02
-1.5981e-02 -1.0119e-02 -1.4555e-02
⋮
( 2 , 0 ,.,.) =
-6.1149e-03 -6.6849e-03 -6.9256e-03
-5.1692e-03 -8.9064e-03 -1.4313e-02
-1.1450e-02 -1.7125e-02 -2.3729e-02
( 2 , 1 ,.,.) =
3.9899e-02 1.6684e-02 2.0991e-02
1.6498e-02 -2.6236e-02 -1.1630e-02
5.9030e-03 -2.0597e-02 -1.5280e-02
( 2 , 2 ,.,.) =
6.0228e-03 2.4200e-02 2.0716e-02
4.9551e-03 -6.1590e-03 1.4790e-02
9.8595e-03 -2.7931e-02 -5.4261e-03
...
( 2 ,253,.,.) =
-9.7426e-03 -1.6989e-03 -1.0106e-02
-6.1351e-04 4.3355e-02 3.8143e-02
3.7943e-03 4.4980e-02 3.9165e-02
( 2 ,254,.,.) =
1.3395e-02 6.9187e-03 1.9631e-02
6.7533e-03 2.5027e-02 1.5162e-02
1.7857e-03 -4.3971e-03 3.7016e-03
( 2 ,255,.,.) =
-1.7440e-02 -1.6260e-02 -2.4000e-02
-1.9716e-02 -1.7364e-02 -1.7828e-02
-3.0010e-02 -1.3697e-02 -2.1068e-02
...
⋮
(253, 0 ,.,.) =
6.5624e-03 6.0837e-03 2.8446e-02
-1.2967e-02 -5.0910e-02 -2.0435e-02
-1.5419e-02 -1.4899e-02 -1.8056e-02
(253, 1 ,.,.) =
1.2341e-02 3.2479e-02 2.2650e-02
-4.2432e-03 -1.8113e-02 2.2224e-03
2.9012e-03 -1.7405e-02 3.1869e-03
(253, 2 ,.,.) =
-1.0992e-02 1.1080e-02 -1.4198e-02
8.2258e-03 3.0135e-02 4.1601e-02
6.0791e-04 1.6776e-04 2.1328e-02
...
(253,253,.,.) =
-7.5068e-04 2.6565e-02 1.1820e-02
-1.5916e-02 -7.4243e-03 -5.1214e-03
4.1732e-03 -6.8548e-03 -7.3191e-03
(253,254,.,.) =
-6.9767e-03 9.7686e-04 1.8935e-03
6.0631e-03 5.0983e-02 -3.4937e-03
-8.1496e-03 -3.0339e-02 -1.7409e-02
(253,255,.,.) =
-1.0048e-02 3.2093e-04 -1.1435e-03
-1.5435e-03 -2.9689e-02 -1.9539e-02
-9.6000e-04 4.8948e-03 1.5117e-02
⋮
(254, 0 ,.,.) =
1.6080e-02 1.2594e-02 5.4767e-03
-1.3241e-02 -1.9564e-02 -2.0807e-02
-7.7261e-03 -2.3040e-02 -2.0197e-02
(254, 1 ,.,.) =
-1.8947e-03 5.3025e-02 1.3421e-02
2.7344e-03 2.4908e-02 1.6726e-02
-1.9196e-02 -1.8768e-02 -1.9954e-02
(254, 2 ,.,.) =
8.0703e-03 2.9987e-02 5.7642e-04
3.5938e-03 2.5408e-02 -1.0444e-02
-9.6803e-04 -1.9317e-02 -1.2085e-02
...
(254,253,.,.) =
1.6295e-02 2.5060e-02 2.8950e-02
-7.3188e-03 -1.4100e-03 1.2378e-02
-2.1144e-02 -3.4673e-02 -1.9507e-02
(254,254,.,.) =
1.6469e-02 5.1930e-02 4.9364e-02
5.8284e-03 1.9868e-02 3.6292e-02
-4.9320e-03 -1.6470e-02 -1.2967e-02
(254,255,.,.) =
-1.0214e-02 -3.0802e-02 -3.4004e-02
5.5274e-03 -1.0925e-02 4.6995e-04
3.8212e-02 2.0936e-02 3.2566e-02
⋮
(255, 0 ,.,.) =
1.8364e-02 -3.0699e-03 1.0348e-02
-7.2351e-03 -1.2742e-03 -6.9527e-03
2.1686e-02 1.1490e-03 -3.2707e-03
(255, 1 ,.,.) =
-1.6594e-02 1.5176e-04 -9.1776e-03
1.5036e-02 5.8408e-02 2.1840e-02
-1.3606e-02 1.8126e-02 1.6354e-02
(255, 2 ,.,.) =
2.1872e-02 3.1581e-02 1.8289e-02
-2.1028e-03 -1.5633e-02 2.0265e-02
5.2924e-03 4.8438e-04 1.5701e-02
...
(255,253,.,.) =
4.4712e-03 -2.4757e-03 1.7267e-03
-5.2339e-03 -8.8001e-03 1.3738e-02
-1.0695e-02 1.0347e-03 1.6962e-02
(255,254,.,.) =
-5.9934e-03 -3.6803e-02 3.0996e-03
1.0224e-02 2.9117e-02 -7.3036e-04
9.9051e-03 5.9974e-02 2.7242e-02
(255,255,.,.) =
-9.1759e-03 -1.8297e-02 6.2411e-03
-3.1871e-02 -2.9350e-02 -1.4883e-02
-1.4808e-02 -1.2348e-02 -2.3609e-02
[torch.FloatTensor of size 256x256x3x3]
), ('layer3.0.bn2.weight',
0.3212
0.2124
0.2661
0.3594
0.2785
0.2582
0.3108
0.3096
0.3348
0.2992
0.2545
0.2458
0.3133
0.4159
0.2997
0.3070
0.3135
0.4418
0.3743
0.2570
0.2943
0.3078
0.2738
0.3948
0.2928
0.3572
0.3435
0.5379
0.4243
0.3908
0.2745
0.2798
0.3217
0.1956
0.2751
0.3187
0.3507
0.2751
0.1919
0.3307
0.2850
0.3038
0.2179
0.2652
0.2944
0.2138
0.2184
0.2948
0.3262
0.3759
0.2557
0.3796
0.2950
0.3386
0.3243
0.3070
0.3331
0.2302
0.3036
0.3377
0.2922
0.2204
0.3267
0.3198
0.4023
0.2987
0.4860
0.2854
0.2716
0.4341
0.2834
0.2296
0.2507
0.3120
0.3673
0.3244
0.3380
0.3272
0.2868
0.2877
0.3210
0.2332
0.3379
0.2767
0.2942
0.2672
0.4401
0.2908
0.3771
0.2789
0.3056
0.3276
0.3871
0.2453
0.2559
0.2783
0.3168
0.3410
0.2318
0.3577
0.5036
0.3557
0.2475
0.1852
0.2273
0.3602
0.2919
0.3928
0.4423
0.2052
0.2524
0.2189
0.4113
0.3611
0.4284
0.2333
0.3504
0.7001
0.3754
0.2874
0.3702
0.3174
0.3640
0.2889
0.4155
0.2479
0.2898
0.3740
0.4926
0.2808
0.2388
0.3473
0.1868
0.2837
0.3090
0.3614
0.2797
0.6871
0.2854
0.2937
0.3128
0.4863
0.2193
0.2871
0.2554
0.4175
0.3044
0.3230
0.3343
0.4947
0.3924
0.2264
0.2657
0.4193
0.3483
0.3551
0.2877
0.2559
0.2459
0.2775
0.3842
0.2949
0.3510
0.1926
0.3101
0.3417
0.3931
0.3918
0.3239
0.2851
0.4583
0.2669
0.2663
0.4433
0.3221
0.3655
0.3336
0.4393
0.3970
0.3727
0.3523
0.3586
0.3286
0.4181
0.2955
0.3050
0.2988
0.4320
0.2309
0.3826
0.2270
0.2228
0.3206
0.3273
0.2627
0.3087
0.2920
0.2328
0.4144
0.4075
0.3264
0.3583
0.3014
0.3150
0.4438
0.4042
0.2028
0.3855
0.2570
0.2361
0.2343
0.3312
0.2303
0.3744
0.4727
0.3601
0.2754
0.1987
0.3027
0.3427
0.2994
0.2533
0.2639
0.3460
0.3847
0.4368
0.3786
0.3123
0.2591
0.3979
0.2577
0.3131
0.2934
0.3027
0.2942
0.2266
0.2806
0.2977
0.1858
0.2788
0.2504
0.3948
0.3496
0.2429
0.2155
0.2683
0.4100
0.3495
0.4243
0.2627
0.3329
0.2849
0.3924
0.3728
0.2655
0.3338
[torch.FloatTensor of size 256]
), ('layer3.0.bn2.bias',
-0.0264
0.0995
-0.0068
-0.0877
0.0078
0.0407
-0.0307
0.0060
0.0017
0.0478
0.0630
0.0358
-0.0504
0.0214
-0.0090
-0.0337
-0.0455
-0.1924
-0.0676
0.0775
-0.0340
-0.0799
0.1314
-0.1273
-0.0628
-0.0055
-0.0915
-0.1757
-0.0083
-0.0945
0.0025
-0.0319
-0.0158
0.1437
-0.0035
0.0108
-0.0511
0.0358
0.0878
-0.0452
-0.0458
0.0147
0.0687
0.0168
-0.0477
0.0568
0.0460
-0.0507
0.0059
-0.1034
0.0103
-0.1052
-0.0166
-0.0192
-0.0345
0.0201
-0.1362
0.0396
-0.0088
-0.0108
-0.0298
0.0721
-0.0669
-0.0094
-0.0310
-0.0267
-0.1418
0.1190
0.0669
-0.2137
0.0427
0.0478
0.0339
0.0001
-0.1482
-0.0237
-0.0743
-0.0684
-0.0201
0.0147
-0.0396
0.0194
-0.0696
-0.0558
0.0080
0.0236
-0.2578
0.0064
-0.1004
0.0280
0.0152
-0.0484
-0.1536
0.1049
0.0499
0.0657
-0.0541
0.0077
0.0941
-0.0200
-0.2356
-0.0623
0.0334
0.1102
0.0770
-0.0325
0.0481
-0.1499
-0.1650
0.1230
0.0712
0.0589
-0.0482
-0.0972
-0.1860
0.0853
-0.0516
-0.3080
-0.0604
-0.0771
-0.2728
0.0289
-0.1328
0.0173
-0.0392
0.0542
-0.0372
-0.1528
-0.1766
0.0839
0.0693
-0.0826
0.1118
-0.0508
-0.0448
-0.0375
0.0304
-0.3782
0.0149
0.0068
-0.0521
-0.2950
0.0899
0.0296
0.0199
-0.0835
-0.0964
-0.0238
0.0349
-0.2663
-0.1618
0.0736
0.0276
-0.1109
-0.0103
-0.0975
0.0140
0.0108
0.0784
0.0131
-0.0395
0.0248
-0.0774
-0.0284
0.0104
-0.0423
-0.1663
-0.0949
-0.0343
0.0455
-0.3000
-0.0069
0.0141
-0.2615
-0.0736
-0.1063
-0.0105
-0.0712
-0.1034
-0.0298
-0.1428
-0.0517
-0.0571
-0.0544
-0.0423
-0.0085
0.0159
-0.0654
-0.0613
-0.1450
0.0399
0.0816
-0.0078
-0.0341
0.0320
-0.0448
-0.0703
0.1021
-0.1799
-0.2117
-0.0598
-0.1160
0.0393
-0.0454
-0.1845
-0.1085
0.0558
-0.0636
0.0168
0.0002
0.0799
-0.0672
0.0798
-0.0040
-0.1902
0.0200
0.0732
0.1032
-0.0264
0.0240
-0.0442
0.0229
0.0234
-0.0235
0.0105
-0.2149
-0.1281
-0.0183
-0.0006
-0.0516
0.0566
-0.0543
0.0141
-0.0499
0.0673
0.0517
-0.0040
0.0351
0.0828
0.0100
0.0592
-0.2043
-0.0762
0.0414
0.0775
0.0760
-0.1592
-0.0836
-0.1663
0.0023
-0.0685
0.0381
-0.0987
-0.0203
0.0154
-0.1055
[torch.FloatTensor of size 256]
), ('layer3.0.bn2.running_mean',
-0.1898
-0.4822
-0.0088
0.0064
-0.3401
-0.1041
-0.1626
-0.2259
-0.1119
-0.3254
-0.0254
-0.2351
-0.0790
-0.3306
-0.0956
-0.0415
-0.3207
-0.0037
-0.1830
-0.1295
-0.2069
-0.2632
-0.1351
-0.1295
-0.2527
-0.0104
-0.0875
-0.3375
-0.5001
-0.1199
-0.1989
-0.2964
-0.1924
-0.2904
-0.0091
-0.0104
0.0738
-0.1760
0.0442
-0.2232
0.0376
-0.1235
-0.0065
-0.2524
-0.0120
0.0555
0.1533
-0.1421
-0.1160
-0.0893
-0.1547
-0.1615
0.0208
-0.3496
-0.1477
-0.6155
-0.1364
-0.0405
-0.3246
-0.1697
0.1694
-0.0662
-0.2076
-0.3969
-0.2936
0.1080
-0.2798
-0.0859
-0.0713
-0.3520
-0.0642
-0.1993
0.0202
-0.3808
-0.0833
-0.1321
-0.3009
-0.1800
0.0824
-0.0532
0.1538
0.1777
0.1837
-0.1972
-0.0083
-0.2135
-0.3881
-0.1686
-0.1149
0.2055
-0.2054
-0.1345
-0.1579
-0.1801
-0.2133
-0.2940
-0.2087
-0.0419
-0.2158
-0.0453
-0.2935
-0.1574
0.0310
0.0154
-0.1013
0.0401
-0.4071
-0.2852
-0.2954
-0.2261
-0.1083
0.1359
-0.6190
-0.1957
-0.2018
-0.0181
-0.3157
-0.0974
-0.2188
0.0105
-0.0686
-0.2937
-0.3168
-0.1745
0.0286
-0.1721
-0.2043
-0.2114
-0.2032
-0.2170
0.0459
0.2110
-0.1009
-0.0560
0.1501
-0.1713
0.0171
-0.2029
-0.2175
0.0836
-0.0215
-0.3423
-0.1450
0.1632
-0.1679
-0.1672
-0.1634
-0.3611
-0.0664
-0.3015
-0.1192
0.0192
-0.1420
-0.2852
-0.3039
-0.0897
-0.0659
-0.0240
-0.2212
0.0306
-0.0083
-0.3773
-0.2584
0.0030
-0.0981
-0.2602
-0.1212
-0.2094
-0.1398
-0.1795
-0.1467
0.0102
-0.1396
-0.2732
-0.1427
-0.1136
-0.1668
-0.3346
-0.3108
-0.0469
-0.0733
-0.3828
-0.1082
-0.0854
-0.1564
-0.1707
-0.1396
0.0373
0.2787
-0.2415
-0.1196
-0.1453
-0.2642
-0.1012
0.0470
-0.1133
0.1593
-0.0566
-0.1868
-0.2362
0.0922
0.1657
0.1560
-0.1998
-0.1939
0.1154
0.0537
-0.2192
-0.0997
-0.2332
-0.1498
0.0317
0.0793
-0.2177
-0.2654
-0.2278
-0.0419
0.0142
-0.2111
-0.0224
0.0953
-0.1628
0.0981
-0.1220
-0.0360
-0.3884
0.1147
0.0069
-0.2821
-0.6060
-0.2243
0.0177
-0.0736
-0.1372
-0.0436
0.1616
-0.1906
-0.2774
0.1136
0.1891
0.0610
0.0161
-0.1046
-0.0830
0.0079
-0.0963
-0.1956
-0.1445
-0.1591
0.0612
-0.2552
0.0082
0.1980
-0.2280
-0.1163
-0.1644
[torch.FloatTensor of size 256]
), ('layer3.0.bn2.running_var',
0.1079
0.1131
0.0924
0.0718
0.0979
0.1616
0.0678
0.0953
0.1568
0.1489
0.1486
0.1188
0.1037
0.3279
0.0749
0.0980
0.0845
0.0636
0.0813
0.1006
0.0732
0.0642
0.1933
0.0824
0.0592
0.1489
0.0738
0.0896
0.2559
0.1517
0.0685
0.1045
0.1164
0.1185
0.0899
0.1024
0.0694
0.1614
0.0858
0.0782
0.0630
0.1314
0.1087
0.1018
0.0709
0.0973
0.0638
0.0795
0.0911
0.0772
0.0806
0.0986
0.1304
0.1646
0.1006
0.1233
0.0703
0.1033
0.1296
0.2374
0.0908
0.1296
0.0678
0.1330
0.2067
0.0860
0.1207
0.1573
0.1656
0.0776
0.1666
0.1887
0.1168
0.1144
0.0464
0.1130
0.0565
0.0652
0.1031
0.1098
0.0761
0.1014
0.0627
0.0624
0.0985
0.1319
0.0639
0.0881
0.0646
0.1989
0.2018
0.0689
0.0622
0.1239
0.1009
0.1935
0.0756
0.1753
0.1422
0.1525
0.0735
0.1037
0.0774
0.0733
0.1340
0.0680
0.1867
0.0649
0.0885
0.1279
0.1967
0.1160
0.1611
0.0786
0.0696
0.1040
0.1400
0.0959
0.0994
0.0631
0.0447
0.1600
0.0627
0.1007
0.2316
0.1074
0.0695
0.0526
0.1179
0.2217
0.0784
0.0968
0.0907
0.0670
0.0758
0.0821
0.1580
0.1111
0.0929
0.0876
0.0962
0.0588
0.0759
0.0939
0.0777
0.1199
0.0678
0.1432
0.0874
0.0669
0.0881
0.1013
0.1303
0.0878
0.1138
0.0809
0.0946
0.0940
0.1462
0.1615
0.0843
0.1210
0.0923
0.0445
0.0966
0.1024
0.0637
0.0707
0.0742
0.1396
0.0499
0.1179
0.0688
0.0487
0.0878
0.0864
0.0791
0.1722
0.0996
0.1252
0.0556
0.0761
0.0729
0.1205
0.0550
0.1080
0.1323
0.2279
0.0527
0.0671
0.0955
0.1127
0.1290
0.0872
0.0926
0.0790
0.0589
0.1320
0.0604
0.0704
0.0905
0.0549
0.1623
0.0629
0.0672
0.0640
0.0800
0.1275
0.1319
0.0743
0.1381
0.0659
0.1329
0.1962
0.0738
0.1416
0.1639
0.0966
0.0992
0.0786
0.0766
0.0907
0.0760
0.1000
0.1351
0.0603
0.0603
0.0767
0.0671
0.1331
0.1200
0.0779
0.0832
0.0619
0.1206
0.0986
0.0742
0.0960
0.0676
0.0902
0.1195
0.0415
0.0926
0.1191
0.1055
0.1332
0.0862
0.0865
0.0650
0.0798
0.0661
0.1010
0.1038
0.1005
0.0945
0.0611
[torch.FloatTensor of size 256]
), ('layer3.0.downsample.0.weight',
( 0 , 0 ,.,.) =
8.0862e-03
( 0 , 1 ,.,.) =
-1.9208e-02
( 0 , 2 ,.,.) =
-1.7272e-02
...
( 0 ,125,.,.) =
-1.2758e-02
( 0 ,126,.,.) =
2.5496e-03
( 0 ,127,.,.) =
5.3547e-03
⋮
( 1 , 0 ,.,.) =
-1.4284e-02
( 1 , 1 ,.,.) =
-5.5428e-02
( 1 , 2 ,.,.) =
-3.4568e-02
...
( 1 ,125,.,.) =
2.7476e-02
( 1 ,126,.,.) =
3.5964e-02
( 1 ,127,.,.) =
2.3994e-02
⋮
( 2 , 0 ,.,.) =
7.6148e-03
( 2 , 1 ,.,.) =
2.0725e-02
( 2 , 2 ,.,.) =
-1.0066e-02
...
( 2 ,125,.,.) =
-2.7756e-02
( 2 ,126,.,.) =
6.3956e-03
( 2 ,127,.,.) =
-2.2016e-03
...
⋮
(253, 0 ,.,.) =
3.3605e-02
(253, 1 ,.,.) =
-4.2383e-02
(253, 2 ,.,.) =
2.2568e-02
...
(253,125,.,.) =
-3.3004e-02
(253,126,.,.) =
-9.1010e-04
(253,127,.,.) =
-1.7735e-02
⋮
(254, 0 ,.,.) =
-1.1416e-02
(254, 1 ,.,.) =
-1.8309e-02
(254, 2 ,.,.) =
7.6073e-03
...
(254,125,.,.) =
1.5128e-02
(254,126,.,.) =
3.3239e-02
(254,127,.,.) =
2.0724e-04
⋮
(255, 0 ,.,.) =
6.2636e-03
(255, 1 ,.,.) =
-2.0036e-02
(255, 2 ,.,.) =
1.0343e-03
...
(255,125,.,.) =
-1.9124e-02
(255,126,.,.) =
4.5483e-02
(255,127,.,.) =
7.8252e-03
[torch.FloatTensor of size 256x128x1x1]
), ('layer3.0.downsample.1.weight',
0.0674
0.0514
0.0385
0.1692
0.0604
0.0460
0.1209
0.1110
0.0418
0.0387
0.0442
0.0707
0.0790
0.1094
0.0959
0.0544
0.1032
0.2190
0.0459
0.0372
0.1410
0.0587
0.0360
0.0955
0.1657
0.1024
0.1417
0.0580
0.0536
0.0716
0.0865
0.1110
0.0511
0.0515
0.0809
0.1154
0.0777
0.0449
0.0490
0.1056
0.1457
0.0744
0.0530
0.0600
0.1026
0.0486
0.0408
0.1312
0.0639
0.1062
0.0915
0.1476
0.0900
0.0742
0.1069
0.0776
0.1423
0.0495
0.0974
0.0661
0.1292
0.0548
0.1145
0.0950
0.0921
0.1579
0.0496
0.0236
0.0398
0.0935
0.0291
0.0653
0.0885
0.1190
0.1692
0.0692
0.1316
0.0606
0.0480
0.0654
0.1082
0.0624
0.1103
0.1106
0.1076
0.0400
0.0723
0.0947
0.0662
0.0464
0.0444
0.1727
0.0921
0.0345
0.0451
0.0374
0.0940
0.0818
0.0397
0.0452
0.0985
0.1095
0.1072
0.0506
0.0444
0.0755
0.0420
0.1046
0.1172
0.0447
0.0459
0.0409
0.0539
0.1036
0.0741
0.0311
0.1086
0.1746
0.0777
0.0689
0.1100
0.0489
0.1048
0.1097
0.1025
0.0448
0.0675
0.0707
0.1364
0.0438
0.0346
0.1769
0.0667
0.1155
0.0628
0.0873
0.0406
0.2890
0.0703
0.0428
0.1173
0.1049
0.0611
0.0469
0.0400
0.0744
0.1003
0.1012
0.0599
0.1078
0.1512
0.0322
0.0430
0.0977
0.0951
0.0838
0.0958
0.0448
0.0263
0.0425
0.1154
0.0771
0.1781
0.0300
0.0699
0.0724
0.1600
0.0893
0.1130
0.0534
0.1359
0.0375
0.0809
0.1145
0.1232
0.0942
0.0880
0.0346
0.0996
0.0461
0.0694
0.0630
0.1590
0.0509
0.1254
0.0590
0.0744
0.1084
0.0514
0.0931
0.0848
0.0240
0.0279
0.0993
0.0612
0.0599
0.1095
0.0508
0.0658
0.1162
0.0833
0.1651
0.0505
0.1231
0.1228
0.1038
0.0369
0.0756
0.0415
0.1192
0.0292
0.0839
0.0577
0.0951
0.0944
0.0309
0.0390
0.0604
0.0672
0.0501
0.0383
0.0946
0.0958
0.0501
0.0243
0.1074
0.1908
0.0693
0.1376
0.1151
0.0329
0.0647
0.0616
0.1106
0.0358
0.0721
0.0851
0.0375
0.0368
0.0947
0.0464
0.1666
0.1049
0.0755
0.0398
0.0249
0.1528
0.1167
0.0886
0.0540
0.0726
0.0736
0.0797
0.0854
0.0609
0.1263
[torch.FloatTensor of size 256]
), ('layer3.0.downsample.1.bias',
-0.0264
0.0995
-0.0068
-0.0877
0.0078
0.0407
-0.0307
0.0060
0.0017
0.0478
0.0630
0.0358
-0.0504
0.0214
-0.0090
-0.0337
-0.0455
-0.1924
-0.0676
0.0775
-0.0340
-0.0799
0.1314
-0.1273
-0.0628
-0.0055
-0.0915
-0.1757
-0.0083
-0.0945
0.0025
-0.0319
-0.0158
0.1437
-0.0035
0.0108
-0.0511
0.0358
0.0878
-0.0452
-0.0458
0.0147
0.0687
0.0168
-0.0477
0.0568
0.0460
-0.0507
0.0059
-0.1034
0.0103
-0.1052
-0.0166
-0.0192
-0.0345
0.0201
-0.1362
0.0396
-0.0088
-0.0108
-0.0298
0.0721
-0.0669
-0.0094
-0.0310
-0.0267
-0.1418
0.1190
0.0669
-0.2137
0.0427
0.0478
0.0339
0.0001
-0.1482
-0.0237
-0.0743
-0.0684
-0.0201
0.0147
-0.0396
0.0194
-0.0696
-0.0558
0.0080
0.0236
-0.2578
0.0064
-0.1004
0.0280
0.0152
-0.0484
-0.1536
0.1049
0.0499
0.0657
-0.0541
0.0077
0.0941
-0.0200
-0.2356
-0.0623
0.0334
0.1102
0.0770
-0.0325
0.0481
-0.1499
-0.1650
0.1230
0.0712
0.0589
-0.0482
-0.0972
-0.1860
0.0853
-0.0516
-0.3080
-0.0604
-0.0771
-0.2728
0.0289
-0.1328
0.0173
-0.0392
0.0542
-0.0372
-0.1528
-0.1766
0.0839
0.0693
-0.0826
0.1118
-0.0508
-0.0448
-0.0375
0.0304
-0.3782
0.0149
0.0068
-0.0521
-0.2950
0.0899
0.0296
0.0199
-0.0835
-0.0964
-0.0238
0.0349
-0.2663
-0.1618
0.0736
0.0276
-0.1109
-0.0103
-0.0975
0.0140
0.0108
0.0784
0.0131
-0.0395
0.0248
-0.0774
-0.0284
0.0104
-0.0423
-0.1663
-0.0949
-0.0343
0.0455
-0.3000
-0.0069
0.0141
-0.2615
-0.0736
-0.1063
-0.0105
-0.0712
-0.1034
-0.0298
-0.1428
-0.0517
-0.0571
-0.0544
-0.0423
-0.0085
0.0159
-0.0654
-0.0613
-0.1450
0.0399
0.0816
-0.0078
-0.0341
0.0320
-0.0448
-0.0703
0.1021
-0.1799
-0.2117
-0.0598
-0.1160
0.0393
-0.0454
-0.1845
-0.1085
0.0558
-0.0636
0.0168
0.0002
0.0799
-0.0672
0.0798
-0.0040
-0.1902
0.0200
0.0732
0.1032
-0.0264
0.0240
-0.0442
0.0229
0.0234
-0.0235
0.0105
-0.2149
-0.1281
-0.0183
-0.0006
-0.0516
0.0566
-0.0543
0.0141
-0.0499
0.0673
0.0517
-0.0040
0.0351
0.0828
0.0100
0.0592
-0.2043
-0.0762
0.0414
0.0775
0.0760
-0.1592
-0.0836
-0.1663
0.0023
-0.0685
0.0381
-0.0987
-0.0203
0.0154
-0.1055
[torch.FloatTensor of size 256]
), ('layer3.0.downsample.1.running_mean',
-0.1077
-0.1229
-0.0681
-0.1930
-0.0571
-0.0224
-0.0338
-0.2437
-0.0447
0.0452
0.0008
0.0606
-0.0686
-0.0411
0.0435
-0.0873
-0.2157
-0.1593
-0.0157
-0.0698
-0.1796
-0.0204
0.0443
-0.1573
-0.0407
-0.1830
0.0180
-0.0895
-0.0434
-0.2033
-0.0171
0.1442
-0.0797
-0.1848
-0.0201
-0.0438
-0.1435
-0.0157
0.0630
-0.0223
-0.1470
-0.0833
-0.1568
0.0180
0.0083
0.1125
-0.0936
0.0647
-0.1352
-0.1372
0.1363
-0.1031
-0.1675
-0.2070
-0.0078
-0.0178
0.1123
-0.0876
-0.1877
0.0247
-0.2548
-0.1413
-0.0916
-0.1613
0.0087
-0.2045
-0.0420
-0.0763
-0.0522
-0.0029
-0.0424
0.1541
0.0664
-0.0733
-0.0935
-0.0226
-0.1797
0.0129
0.0465
-0.1008
-0.0652
0.0088
-0.0120
0.0576
-0.0571
-0.0667
-0.0228
-0.0880
-0.0192
0.0915
0.0212
-0.2866
-0.1851
0.0631
-0.0325
0.0106
-0.0163
-0.1375
-0.0208
0.0400
0.0382
-0.1582
-0.0242
-0.0104
-0.0253
-0.0071
-0.0822
-0.0029
0.0168
-0.1328
-0.0639
0.0832
-0.0666
-0.0080
-0.0459
0.0450
-0.1013
-0.0630
-0.0629
-0.1361
-0.0500
-0.0488
0.1090
-0.0700
-0.0801
-0.1030
-0.0278
-0.1242
-0.0585
-0.0263
-0.0371
-0.0678
-0.1683
0.0122
0.0382
-0.0072
-0.0424
-0.0864
-0.0058
0.0969
-0.2232
0.0251
-0.0298
-0.0126
0.0531
-0.2009
-0.2212
-0.0022
0.0396
-0.0029
-0.1377
0.0226
0.0195
-0.1747
0.0619
-0.1538
-0.0896
0.0758
-0.0461
0.0011
-0.0787
-0.0877
-0.2203
-0.0431
0.0743
-0.1289
-0.0960
-0.0841
0.0581
-0.1059
-0.1513
-0.0834
0.0357
-0.0917
0.0641
0.0595
-0.1210
-0.0276
0.0376
-0.0100
-0.1370
-0.0962
-0.2814
-0.1033
-0.0437
-0.0338
-0.0591
-0.0691
-0.0883
-0.0485
0.0748
-0.0378
-0.0478
0.0189
-0.0546
-0.0145
0.0332
-0.0593
0.0225
-0.1374
-0.1181
0.0559
0.0222
-0.0021
-0.0954
-0.0417
0.0799
-0.1447
-0.0297
-0.0955
0.0598
0.0732
-0.0074
0.0402
-0.0222
0.0747
0.0112
0.1270
-0.0274
-0.0054
0.0149
-0.0263
-0.0373
-0.0971
0.0749
-0.1377
-0.1877
-0.0638
-0.1487
-0.0099
-0.0275
0.0011
-0.0404
0.0556
-0.1120
-0.1673
0.0402
-0.1795
0.0676
0.0220
-0.0813
0.0919
-0.0402
0.0192
0.0033
-0.0220
-0.1673
-0.1087
-0.1025
0.0476
-0.1374
0.0058
-0.0772
-0.0445
0.0144
-0.1149
[torch.FloatTensor of size 256]
), ('layer3.0.downsample.1.running_var',
1.00000e-02 *
1.3212
1.3382
0.6095
2.3173
1.0613
1.3040
1.8071
2.2225
0.5863
0.8064
0.9384
2.0050
1.3193
4.3016
1.6614
0.8651
1.6540
2.5693
0.8004
0.5019
2.7886
0.3852
0.7347
1.5154
2.1317
2.9001
2.1060
0.6044
1.1943
1.3458
1.3165
2.4103
1.0533
1.7815
1.4061
2.1802
0.9886
1.1141
1.2210
1.3257
1.8708
1.6071
1.4969
1.2958
1.1608
0.9993
0.6997
2.5080
1.0482
1.4110
1.8211
2.3854
2.4584
2.0267
1.9324
1.6686
2.3925
1.3504
1.9314
2.4688
2.2756
1.3571
1.4856
2.4605
2.7555
2.8775
0.8696
0.6599
1.0593
1.0700
0.9298
1.9956
2.2161
2.2084
1.6220
1.1626
1.5926
0.7989
1.0761
1.2732
2.0618
1.4437
1.5537
1.8481
2.0876
0.9675
0.4506
1.4136
0.8490
1.2861
1.2262
3.4415
1.4798
0.5757
0.8053
0.8657
1.2911
2.3058
1.4168
0.9986
1.1963
1.5742
2.0729
0.9315
1.1655
1.1168
1.1475
1.6534
1.7462
0.9781
1.7381
1.0939
1.1043
1.3494
0.7770
0.5724
1.8667
1.0200
1.1307
0.8269
0.9879
1.1117
1.4385
2.3246
2.5138
0.9115
1.0578
0.7320
1.6562
1.4827
0.5685
3.4491
1.9896
2.0521
0.9075
1.2675
1.0924
2.6108
1.0890
0.6949
2.2735
0.7547
1.6364
1.0205
0.7436
1.3534
1.5288
2.5751
0.7504
0.9168
2.3543
0.7190
0.8317
1.2518
1.4798
1.1023
1.5361
0.4985
0.6109
0.9817
1.3473
1.6659
2.8802
0.3228
1.5283
1.3065
1.9141
1.0007
2.0190
1.4825
1.1234
0.6844
1.4365
0.9743
2.1293
1.5214
1.3259
0.6814
1.2939
0.6188
0.6421
0.7311
2.2736
0.8418
1.9729
1.0084
1.9774
2.2133
0.7438
1.1675
2.6822
0.6403
0.8460
1.2970
1.1952
0.9338
1.1365
1.6780
0.8185
1.0743
1.6519
2.0700
0.9490
1.9632
1.3310
1.1290
0.5067
1.5383
1.1103
2.8381
1.1821
1.1883
1.5355
2.1017
1.1177
1.0712
0.7522
1.4946
1.2895
0.5904
0.4068
2.0167
1.5424
1.0638
0.3425
1.1384
3.0723
0.7040
3.0110
1.8263
0.6956
0.5911
0.8097
1.8654
0.6678
1.8846
1.3750
0.6798
0.8424
1.5255
1.4950
1.3587
2.0014
1.5513
0.9465
0.5946
2.3052
1.8731
0.9167
0.9334
1.4370
1.1357
1.1465
1.0277
1.0530
1.7208
[torch.FloatTensor of size 256]
), ('layer3.1.conv1.weight',
( 0 , 0 ,.,.) =
4.8367e-02 4.8045e-02 3.8471e-02
4.9888e-02 5.5208e-02 5.6701e-02
2.4192e-02 1.3436e-02 2.4655e-02
( 0 , 1 ,.,.) =
-3.6542e-03 -3.1100e-03 4.9227e-03
-1.2114e-03 3.4020e-03 1.9846e-02
-2.1704e-02 -2.1158e-02 -2.8686e-03
( 0 , 2 ,.,.) =
-1.2536e-02 -2.0486e-02 -2.3154e-02
-1.3515e-02 -2.3781e-02 -2.5515e-02
1.0584e-02 7.2999e-03 -5.2329e-03
...
( 0 ,253,.,.) =
-4.3596e-02 -1.8328e-02 -5.0577e-02
1.6590e-02 5.0719e-02 2.1919e-02
-1.9203e-02 -8.8315e-03 -2.0335e-02
( 0 ,254,.,.) =
-7.6949e-03 -1.5848e-02 1.5841e-03
-6.2470e-03 -1.3135e-02 6.9092e-03
-3.3791e-03 1.7889e-03 3.7373e-03
( 0 ,255,.,.) =
-6.6310e-03 5.8503e-03 -5.8571e-04
-2.4600e-02 -8.9747e-03 -7.2466e-03
-1.7566e-02 -8.5829e-03 -7.5220e-03
⋮
( 1 , 0 ,.,.) =
-2.3679e-02 -9.4399e-03 -1.1688e-02
-2.4777e-02 -1.7326e-02 -3.1489e-02
-3.3683e-03 9.7571e-03 -5.1527e-03
( 1 , 1 ,.,.) =
-3.0809e-02 -4.0685e-02 -2.2731e-02
-5.1065e-03 -1.6457e-02 -1.8804e-02
5.0382e-02 5.2054e-02 3.9185e-02
( 1 , 2 ,.,.) =
-3.7790e-02 -4.2234e-02 -2.9703e-02
-6.4766e-03 2.6967e-03 -8.1736e-03
3.7747e-02 5.5416e-02 2.5806e-02
...
( 1 ,253,.,.) =
-2.7275e-02 -4.5364e-02 -3.9567e-02
8.9827e-03 1.6150e-02 1.1675e-02
-9.7209e-03 -3.6449e-02 -1.6842e-02
( 1 ,254,.,.) =
1.7824e-02 1.5013e-02 1.0225e-02
5.4044e-03 1.1664e-02 6.4623e-03
2.1803e-02 4.1795e-02 1.9234e-02
( 1 ,255,.,.) =
-2.6730e-04 1.5218e-03 -5.0352e-03
2.5761e-02 2.7110e-02 -9.3395e-04
-1.1949e-02 -7.5204e-03 -3.9370e-03
⋮
( 2 , 0 ,.,.) =
-1.7447e-02 -1.8358e-02 -2.6020e-02
-1.4074e-02 -1.1302e-02 -1.4814e-02
-3.1460e-03 -1.8674e-02 -9.3350e-03
( 2 , 1 ,.,.) =
-5.1125e-03 -4.8036e-03 1.8139e-02
-1.0524e-02 -1.5152e-02 2.3904e-03
8.7093e-03 9.3810e-03 2.4203e-03
( 2 , 2 ,.,.) =
-7.6392e-03 -8.1496e-03 -1.5331e-02
-8.0622e-03 -1.3383e-02 -1.3938e-02
-1.6904e-02 -3.0059e-02 -1.8659e-02
...
( 2 ,253,.,.) =
1.8390e-02 -2.6080e-03 9.3782e-03
-6.4662e-04 -1.3146e-02 1.0045e-02
-2.2293e-03 -1.4097e-02 1.7385e-02
( 2 ,254,.,.) =
3.0293e-04 2.9622e-03 1.0030e-02
-5.7588e-03 -1.6943e-03 6.9988e-03
9.8134e-03 1.4197e-02 5.9742e-03
( 2 ,255,.,.) =
2.8753e-03 -1.7814e-03 1.0873e-02
1.5230e-02 4.5867e-03 1.6860e-02
1.9536e-03 1.9503e-02 1.2168e-02
...
⋮
(253, 0 ,.,.) =
1.3983e-02 2.4598e-03 -7.4604e-03
-2.2250e-02 -1.2757e-02 -2.8846e-03
-1.0911e-02 7.5499e-03 8.6910e-03
(253, 1 ,.,.) =
-4.8463e-03 -8.3250e-03 1.3420e-02
-6.2502e-03 -7.3982e-03 1.1153e-02
4.0391e-03 -9.0354e-03 -7.5441e-03
(253, 2 ,.,.) =
-5.1627e-03 -8.9529e-03 -1.2414e-02
-4.9261e-03 -3.5488e-03 2.1501e-03
-1.1709e-02 -1.4984e-02 -1.9216e-03
...
(253,253,.,.) =
1.5428e-02 -7.6036e-04 -1.3522e-03
-3.4856e-02 -7.4478e-04 -6.5064e-03
-9.1655e-03 -2.8467e-02 -4.8924e-02
(253,254,.,.) =
1.2207e-02 1.0519e-02 -8.4421e-03
-2.5495e-02 2.8140e-03 1.6165e-03
-1.8831e-02 1.2268e-02 1.5439e-02
(253,255,.,.) =
-1.3684e-02 -4.1732e-03 1.2609e-02
-6.8834e-04 5.9757e-03 -1.0183e-02
2.1559e-04 -1.3462e-02 -3.0114e-02
⋮
(254, 0 ,.,.) =
-1.6186e-02 -6.4926e-02 -4.3146e-02
-2.1790e-02 -4.9106e-02 -3.4568e-02
4.0506e-02 4.2449e-02 6.1562e-02
(254, 1 ,.,.) =
3.5715e-03 -1.0916e-02 -2.2922e-02
-2.4831e-03 6.4555e-03 -1.1316e-02
1.6662e-03 -1.9145e-02 -2.3007e-02
(254, 2 ,.,.) =
-7.1243e-03 -4.2783e-05 4.9363e-03
-1.5832e-02 4.0474e-03 4.5135e-04
-4.7967e-03 -7.2164e-04 -1.7230e-02
...
(254,253,.,.) =
1.1589e-02 7.7814e-04 6.3205e-03
1.1360e-02 -6.2076e-03 -2.7689e-02
2.6392e-02 2.3775e-03 -1.4937e-02
(254,254,.,.) =
-1.1237e-02 -2.6285e-03 9.1537e-03
-8.2120e-03 -2.2236e-02 3.2917e-04
5.5909e-03 -1.3858e-03 6.8947e-03
(254,255,.,.) =
-1.4783e-02 -1.0367e-02 -2.7472e-02
-4.1090e-02 -3.8532e-02 -3.9202e-02
-2.1614e-02 -3.4340e-02 -1.8542e-02
⋮
(255, 0 ,.,.) =
-1.9492e-02 -1.6098e-02 -3.1792e-02
2.5374e-02 4.6815e-02 2.7513e-02
3.5903e-02 3.1892e-02 2.6156e-02
(255, 1 ,.,.) =
1.6856e-02 1.5645e-02 1.4189e-02
2.2550e-02 3.0456e-02 1.6739e-02
-2.3615e-04 -7.9501e-03 -1.9666e-03
(255, 2 ,.,.) =
-7.9060e-03 -4.7390e-03 1.6030e-03
1.3802e-03 -8.5837e-03 6.9451e-03
1.1407e-02 -5.9877e-03 1.3759e-02
...
(255,253,.,.) =
4.0124e-03 2.9951e-02 1.1915e-02
-4.3412e-02 -3.1776e-03 -2.7705e-02
-1.6183e-02 -1.1247e-02 -3.5084e-02
(255,254,.,.) =
2.9837e-02 5.9935e-02 2.4631e-02
-1.9571e-03 2.2415e-02 -1.5499e-02
1.6075e-02 1.7850e-02 -1.8412e-02
(255,255,.,.) =
-4.3712e-03 -4.9032e-02 -2.1335e-02
-5.2598e-03 -2.8579e-02 -2.2090e-02
8.5126e-03 2.0862e-03 2.3301e-02
[torch.FloatTensor of size 256x256x3x3]
), ('layer3.1.bn1.weight',
0.2480
0.1972
0.2279
0.2709
0.3296
0.2640
0.2710
0.3475
0.2388
0.2904
0.2769
0.3045
0.2268
0.2634
0.2999
0.2397
0.2724
0.2723
0.2133
0.3806
0.2767
0.2403
0.2406
0.2917
0.2675
0.2305
0.2394
0.3123
0.2984
0.3353
0.2234
0.1919
0.3168
0.2626
0.2901
0.2918
0.3455
0.2561
0.2434
0.2298
0.3318
0.3481
0.2032
0.2478
0.2478
0.2483
0.3252
0.2567
0.2685
0.1977
0.2541
0.4079
0.2480
0.2076
0.2276
0.2683
0.2098
0.2056
0.2010
0.3560
0.2384
0.3284
0.1952
0.2445
0.2848
0.3742
0.2746
0.2117
0.3859
0.4785
0.3005
0.2848
0.3762
0.2903
0.2126
0.1776
0.2778
0.3878
0.3123
0.1974
0.2679
0.2300
0.2474
0.2320
0.2635
0.2819
0.2296
0.3194
0.3814
0.2503
0.2269
0.2676
0.3431
0.3799
0.3787
0.2968
0.3021
0.2575
0.3007
0.1939
0.1950
0.3217
0.3623
0.2171
0.2486
0.2266
0.2133
0.2851
0.2715
0.2720
0.3107
0.2174
0.2675
0.2387
0.3434
0.2761
0.2084
0.2975
0.3178
0.2818
0.2858
0.3498
0.2675
0.2638
0.3159
0.2879
0.1873
0.2986
0.3584
0.2570
0.1815
0.2758
0.2640
0.2486
0.2567
0.2252
0.3420
0.2910
0.2898
0.2902
0.2404
0.2381
0.3633
0.2690
0.3810
0.2947
0.2743
0.4644
0.3133
0.2444
0.3477
0.3001
0.1977
0.2301
0.2513
0.2660
0.3271
0.1622
0.2274
0.2225
0.3596
0.3215
0.1997
0.2215
0.2706
0.2831
0.2621
0.3710
0.2730
0.2903
0.1893
0.2140
0.2460
0.3141
0.2424
0.3699
0.2364
0.2420
0.2948
0.2497
0.2760
0.2686
0.2895
0.3857
0.1398
0.2832
0.3362
0.2522
0.2823
0.2381
0.2311
0.3274
0.4078
0.2648
0.2525
0.3388
0.3251
0.2420
0.2856
0.3605
0.2603
0.2294
0.2483
0.2171
0.2353
0.4117
0.2588
0.2888
0.1972
0.2408
0.2755
0.3031
0.2457
0.2744
0.3564
0.2546
0.3673
0.2883
0.2590
0.3021
0.2890
0.3505
0.2092
0.2953
0.3222
0.2925
0.2574
0.3012
0.3893
0.2211
0.2226
0.3258
0.3205
0.2975
0.2323
0.3323
0.2812
0.2702
0.2300
0.2846
0.3318
0.2292
0.3498
0.2622
0.3581
0.4003
0.2924
0.3049
0.3478
0.2845
0.2742
0.2019
0.2466
0.2988
0.2044
0.2691
[torch.FloatTensor of size 256]
), ('layer3.1.bn1.bias',
-0.1332
-0.0644
-0.3239
-0.2390
-0.3262
-0.1796
-0.2087
-0.3208
-0.1874
-0.2988
-0.2099
-0.2283
-0.2141
-0.2460
-0.2768
-0.1351
-0.2498
-0.2393
-0.1223
-0.4590
-0.2172
-0.1220
-0.2101
-0.1779
-0.2426
-0.1546
-0.1549
-0.3716
-0.2817
-0.3886
-0.1545
-0.0687
-0.3412
-0.2261
-0.1961
-0.2242
-0.2984
-0.1381
-0.2251
-0.1658
-0.4534
-0.3226
-0.0977
-0.1349
-0.2619
-0.1428
-0.3960
-0.1633
-0.2101
-0.1161
-0.1448
-0.5502
-0.2179
-0.1246
0.0502
-0.1902
-0.1047
-0.1000
-0.1411
-0.3124
-0.2190
-0.3062
-0.1247
-0.1557
-0.2973
-0.3825
-0.1951
-0.1381
-0.5761
-0.3879
-0.2808
-0.2542
-0.3470
-0.2460
-0.1091
-0.0562
-0.1833
-0.4956
-0.3059
-0.0988
-0.2255
-0.1958
-0.1320
-0.1738
-0.2287
-0.1926
-0.0924
-0.3427
-0.5489
-0.2431
-0.1935
-0.1641
-0.2503
-0.3274
-0.4008
-0.2824
-0.2694
-0.1939
-0.2413
-0.0309
-0.0880
-0.3421
-0.3104
-0.1102
-0.1539
-0.1233
-0.1780
-0.2715
-0.2005
-0.1846
-0.2843
-0.1117
-0.1816
-0.2119
-0.3304
-0.2267
-0.1413
-0.3376
-0.2674
-0.2524
-0.2554
-0.4735
-0.2342
-0.2130
-0.3282
-0.1966
-0.1063
-0.2615
-0.4234
-0.1374
-0.0811
-0.3069
-0.1538
-0.1453
-0.1612
-0.1631
-0.3759
-0.2608
-0.2382
-0.2499
-0.1485
-0.1487
-0.4328
-0.1377
-0.2781
-0.2259
-0.2072
-0.4165
-0.3582
-0.1382
-0.3598
-0.2672
-0.2090
-0.0177
-0.1279
-0.2812
-0.3621
0.0476
-0.2232
-0.1272
-0.3237
-0.3008
-0.1119
-0.0839
-0.2426
-0.2000
-0.1873
-0.4685
-0.2000
-0.3462
-0.0706
-0.1973
-0.3548
-0.1975
-0.3537
-0.3546
-0.1433
-0.2052
-0.2722
-0.1528
-0.2798
-0.1945
-0.2474
-0.4910
0.1322
-0.2378
-0.5166
-0.3959
-0.2354
-0.1266
-0.0810
-0.4132
-0.5576
-0.2238
-0.1563
-0.3950
-0.3283
-0.0846
-0.3103
-0.3130
-0.1498
-0.1396
-0.0972
-0.1620
-0.1631
-0.6364
-0.1350
-0.2345
-0.1049
-0.1625
-0.2878
-0.2450
-0.1468
-0.2035
-0.5358
-0.1683
-0.5524
-0.2511
-0.1230
-0.2305
-0.1925
-0.3759
-0.1014
-0.1697
-0.4002
-0.2980
-0.3035
-0.1563
-0.4660
-0.1155
-0.1665
-0.3382
-0.2935
-0.3122
-0.3015
-0.3261
-0.2542
-0.2037
-0.0955
-0.2070
-0.4370
-0.2051
-0.4205
-0.3125
-0.4845
-0.3528
-0.2624
-0.2894
-0.3976
-0.2107
-0.1791
-0.1075
-0.1213
-0.3022
0.0516
-0.1928
[torch.FloatTensor of size 256]
), ('layer3.1.bn1.running_mean',
-0.1025
-0.2592
-0.0965
-0.3407
-0.7097
-0.6031
-0.2141
-0.9031
-0.4035
-0.6407
-0.2497
-0.3583
-0.4565
-0.6490
-0.5170
-0.1349
-0.4850
-0.4739
-0.3153
-0.8209
-0.4225
-0.6206
-0.4559
-0.3368
-0.4792
-0.1458
-0.2748
-0.3980
-0.7130
-0.8394
-0.2738
-0.2684
-0.7559
-0.1212
-0.4390
-0.6971
-0.5034
-0.5667
-0.0881
-0.5308
-1.0779
-0.4657
-0.1016
-0.3251
0.5384
-0.7573
-0.4718
-0.3475
-0.7198
0.0478
-0.1387
-0.8780
-0.4057
-0.1151
0.0992
-0.3888
-1.0015
-0.3866
-0.3267
-0.7324
0.0908
-0.2760
-0.2759
-0.1892
-0.5036
-0.7406
-0.5314
-0.4804
-0.6063
-0.0027
-0.9185
-0.2444
-1.1849
-0.5390
0.0826
-0.5177
-0.2814
-0.9467
-0.3946
-0.2202
-0.9325
-0.2205
-0.5632
-0.3165
-0.3471
-0.5694
-0.5109
-0.5890
-0.7838
-0.0023
-0.2396
-0.1672
-0.8411
-0.7307
-0.7261
-0.9349
-0.3716
-0.3562
-0.5137
0.0200
-0.1683
-0.5633
-0.7860
0.0991
-0.4193
-0.2072
0.3579
-0.4102
-0.3668
-0.4049
-0.9005
-0.2777
-0.2725
0.3941
-0.5075
-0.4530
-0.1478
-0.0221
-0.8574
-0.3104
-0.3454
-0.6428
-0.2709
-0.8565
-0.6260
0.0389
-0.2254
-0.0605
-0.5984
-0.3643
-0.3485
0.0256
-0.6499
-0.3053
-0.2398
-0.3982
-0.7215
-0.6537
0.0768
-0.4554
-0.5362
-0.1021
-0.6464
-0.2658
-0.4985
-0.4319
-0.3855
-0.3943
-0.5918
-0.2335
-0.6772
-0.5162
0.3806
-0.3896
-0.7292
-0.2207
-0.1687
-0.6623
0.2850
0.1726
-0.1974
-0.5586
-0.2524
0.1773
-0.6096
-0.2281
-0.4691
-0.8133
0.0092
-0.4920
-0.1768
0.1288
-0.0693
-0.8397
0.2613
-0.2188
-0.3190
-0.4868
-0.6609
-0.7675
-0.7933
-0.9342
-0.0691
-0.6616
-0.3705
-0.4538
-0.9279
0.3324
-0.4950
0.2669
-0.8139
-0.7909
-0.7892
-0.4672
-0.5917
-0.5463
-0.2896
-0.0226
-0.2722
-0.3747
0.1202
-0.0338
0.1891
-0.2435
-0.0226
-0.7646
-0.4016
-0.3116
1.5396
-0.1373
-0.7532
-0.6100
-0.2439
-0.5319
-0.6603
-0.3199
-0.8471
-0.0484
0.0830
-0.4584
-0.3889
-0.8163
-0.3322
-0.6670
-0.7744
-0.7010
0.2128
-0.5624
-1.0360
-0.3098
0.7000
-0.3580
-0.3484
-0.5161
-0.2167
-0.8903
-0.3402
-0.4037
-0.5501
-0.4197
-0.5290
-0.4974
0.0083
-0.0166
-0.8607
-0.5581
-0.5161
-0.3668
-0.7363
-0.3654
-0.2788
-0.3890
-0.1521
-0.3327
-0.7903
-0.2531
[torch.FloatTensor of size 256]
), ('layer3.1.bn1.running_var',
0.2021
0.2649
0.0816
0.1141
0.1276
0.1230
0.1780
0.1393
0.1235
0.1178
0.1281
0.1505
0.1060
0.0954
0.1147
0.1260
0.0831
0.2214
0.1452
0.1260
0.1203
0.1507
0.1066
0.3038
0.1444
0.1266
0.1966
0.1145
0.1376
0.0991
0.1222
0.1343
0.1066
0.1148
0.1946
0.1589
0.1715
0.1588
0.1577
0.0995
0.1036
0.1305
0.1547
0.1623
0.1437
0.1624
0.0856
0.2183
0.1339
0.0807
0.1528
0.1277
0.1413
0.1200
0.2567
0.1202
0.1523
0.1513
0.1002
0.1453
0.1620
0.1270
0.1179
0.1004
0.2034
0.1578
0.1785
0.1181
0.0674
0.2460
0.1251
0.1144
0.1670
0.1460
0.1625
0.1203
0.1697
0.1065
0.1415
0.1694
0.0909
0.1133
0.1569
0.0880
0.1333
0.1711
0.2421
0.1188
0.0882
0.1084
0.1373
0.2886
0.1736
0.1740
0.1512
0.1086
0.1211
0.1523
0.1453
0.1735
0.1515
0.1348
0.2445
0.1433
0.1422
0.1520
0.0985
0.1292
0.1372
0.2228
0.1265
0.1538
0.1600
0.1121
0.1922
0.1195
0.1100
0.1151
0.1431
0.1258
0.1416
0.1049
0.1840
0.1158
0.1111
0.2187
0.1193
0.1541
0.1074
0.1350
0.1385
0.0990
0.1418
0.1837
0.1667
0.1712
0.1567
0.1542
0.1501
0.1585
0.1422
0.1527
0.1179
0.1882
0.1856
0.1549
0.1798
0.2879
0.1156
0.1749
0.1297
0.1522
0.1308
0.2123
0.1579
0.0937
0.1310
0.2052
0.1510
0.1542
0.1416
0.1203
0.1372
0.1980
0.1352
0.2065
0.1385
0.1358
0.1696
0.2816
0.1058
0.0886
0.1123
0.2269
0.1117
0.1080
0.2029
0.1026
0.1150
0.1452
0.1180
0.1690
0.2079
0.1133
0.1933
0.1454
0.0872
0.1002
0.1101
0.1757
0.1967
0.0907
0.1175
0.1508
0.1348
0.1203
0.1413
0.2189
0.1030
0.1387
0.1669
0.1090
0.1632
0.0875
0.1349
0.1074
0.2839
0.1628
0.1872
0.1538
0.0973
0.1831
0.2120
0.1716
0.0890
0.1869
0.0877
0.1300
0.2792
0.1565
0.1605
0.1427
0.1366
0.1441
0.1080
0.1166
0.1286
0.2491
0.1250
0.1469
0.1387
0.1164
0.1255
0.0991
0.0942
0.1549
0.2844
0.1235
0.1733
0.1828
0.0891
0.1104
0.1286
0.1495
0.1249
0.4629
0.1380
0.1015
0.1064
0.1612
0.2148
0.1442
0.1714
0.1601
0.2572
0.1879
[torch.FloatTensor of size 256]
), ('layer3.1.conv2.weight',
( 0 , 0 ,.,.) =
-4.2568e-02 -2.6148e-02 -2.2019e-02
-1.7334e-02 -7.5950e-03 -7.2384e-03
-1.7876e-03 2.3800e-02 1.4873e-02
( 0 , 1 ,.,.) =
-2.8277e-03 -5.0644e-03 -4.9442e-03
1.2117e-03 1.4908e-02 1.6013e-02
1.4391e-02 3.3109e-02 5.0061e-02
( 0 , 2 ,.,.) =
-3.4891e-03 -4.4437e-03 2.6589e-03
1.5105e-02 2.6303e-02 2.6802e-02
3.9232e-02 5.0057e-02 4.6637e-02
...
( 0 ,253,.,.) =
2.2877e-02 1.5454e-02 -2.4483e-02
3.1145e-02 3.4944e-02 1.3296e-02
-1.7674e-04 7.3297e-03 -5.7174e-03
( 0 ,254,.,.) =
-2.1781e-02 -3.7379e-02 -1.3382e-02
1.8976e-02 1.4155e-02 -6.5395e-03
2.6831e-02 3.6354e-02 1.1450e-02
( 0 ,255,.,.) =
3.1603e-02 3.3933e-02 3.1575e-02
-1.0098e-02 -1.2657e-02 1.1674e-02
1.0325e-02 7.9424e-05 1.5911e-02
⋮
( 1 , 0 ,.,.) =
2.5937e-02 6.2590e-03 6.0798e-03
-4.5745e-03 -3.5188e-02 -2.9249e-02
2.1366e-02 2.0480e-03 6.2699e-03
( 1 , 1 ,.,.) =
-2.9549e-03 -1.3679e-03 -8.6876e-03
7.9988e-03 1.2888e-03 -5.9629e-03
-9.1481e-03 -2.1914e-02 -4.1572e-02
( 1 , 2 ,.,.) =
7.6390e-03 3.0253e-03 2.7817e-04
7.0329e-03 1.1914e-02 -2.4419e-03
-8.2131e-03 -9.7848e-05 -1.9223e-02
...
( 1 ,253,.,.) =
-4.4498e-03 5.1611e-03 3.7416e-03
3.2110e-04 8.3762e-03 3.6612e-03
9.3343e-03 8.1829e-03 1.1234e-03
( 1 ,254,.,.) =
-6.6849e-02 -5.9871e-02 -3.3931e-02
2.2337e-02 3.1932e-02 3.7244e-02
9.3296e-03 3.7222e-02 1.4052e-02
( 1 ,255,.,.) =
-2.0643e-03 1.2408e-02 -3.1072e-03
-8.2882e-03 1.3917e-02 -2.0680e-02
-1.9329e-02 1.1953e-02 -2.3436e-02
⋮
( 2 , 0 ,.,.) =
-1.7788e-03 -3.5982e-03 -1.2592e-03
-1.5320e-02 -1.0690e-02 -2.0311e-02
-3.4649e-04 -2.2188e-03 -1.5021e-02
( 2 , 1 ,.,.) =
-2.8952e-02 -3.3958e-02 -2.5437e-02
-1.5919e-04 1.5204e-02 3.4554e-02
3.6892e-02 7.0144e-02 7.3610e-02
( 2 , 2 ,.,.) =
1.0721e-02 2.1531e-03 -5.6155e-03
1.1754e-02 -4.8546e-03 -5.5013e-03
-3.7388e-04 -9.7639e-03 -1.5029e-02
...
( 2 ,253,.,.) =
1.5622e-02 9.8976e-03 3.4725e-03
1.4711e-02 7.0707e-03 -9.1826e-03
7.0986e-03 6.3087e-03 -3.5893e-03
( 2 ,254,.,.) =
-6.4518e-03 -6.7673e-03 1.1635e-02
1.4707e-02 2.3831e-02 4.9396e-02
1.8897e-02 3.4981e-02 4.5488e-02
( 2 ,255,.,.) =
1.5900e-02 3.3369e-02 2.6194e-02
1.0616e-02 1.8515e-02 3.0190e-03
1.1004e-02 2.5503e-02 1.3654e-02
...
⋮
(253, 0 ,.,.) =
-2.1231e-02 -1.2804e-02 -1.5498e-02
7.6750e-03 1.2120e-02 1.5099e-02
1.8536e-02 2.5110e-02 2.5283e-02
(253, 1 ,.,.) =
7.4059e-03 -3.0540e-03 -1.5475e-03
-8.4415e-03 -2.2002e-02 -3.4099e-03
9.1918e-03 2.2617e-03 -1.4260e-02
(253, 2 ,.,.) =
-5.2568e-03 -5.3507e-03 -3.2230e-03
-1.5805e-02 6.0508e-03 -1.5917e-03
-8.9323e-03 2.6483e-03 5.0508e-03
...
(253,253,.,.) =
1.9826e-02 -2.1209e-03 1.4889e-02
5.7275e-02 3.5549e-02 6.0175e-03
2.3347e-02 -2.2153e-02 -2.5497e-02
(253,254,.,.) =
-1.3985e-02 -6.4766e-02 -1.7286e-02
1.1704e-02 1.0714e-02 4.6278e-02
-1.0038e-02 -3.5707e-03 2.2691e-02
(253,255,.,.) =
-8.3342e-03 -1.3070e-03 -1.0049e-02
3.2605e-02 5.3259e-02 2.2172e-02
3.7339e-02 6.1155e-02 4.4555e-02
⋮
(254, 0 ,.,.) =
-1.6584e-02 -1.3850e-02 -1.4604e-02
-1.7604e-02 -2.1268e-02 -1.6734e-02
-6.0039e-04 3.8569e-03 1.2837e-02
(254, 1 ,.,.) =
1.7623e-02 2.3706e-02 2.7633e-02
-2.2841e-02 -1.9576e-02 -1.6551e-02
-8.0822e-03 4.3779e-03 -5.3622e-03
(254, 2 ,.,.) =
1.5582e-02 3.7879e-02 2.3555e-02
-6.4632e-03 9.8620e-03 1.2121e-02
-1.3743e-02 -6.1246e-03 -2.7332e-03
...
(254,253,.,.) =
-2.5037e-03 -1.2064e-02 -9.0989e-03
-4.7911e-04 -2.8339e-03 2.1365e-03
-6.2077e-03 -2.6615e-03 1.1215e-02
(254,254,.,.) =
-8.1794e-03 -2.2417e-02 -3.4012e-02
-2.8553e-02 -2.9546e-02 -4.4372e-02
-5.0348e-02 -3.4973e-02 -5.2028e-02
(254,255,.,.) =
1.4728e-02 3.2834e-02 2.6312e-02
1.3449e-02 2.6407e-02 2.6924e-02
2.5572e-02 3.4316e-02 2.6184e-02
⋮
(255, 0 ,.,.) =
7.2026e-03 -2.3931e-03 2.2182e-03
4.2555e-03 -6.4084e-03 7.8548e-03
2.0510e-02 1.8644e-02 2.3280e-02
(255, 1 ,.,.) =
-1.2471e-02 1.3008e-02 1.0010e-02
-1.7496e-03 6.1331e-03 4.3366e-03
5.2269e-03 1.5111e-02 -8.1881e-03
(255, 2 ,.,.) =
-3.7337e-02 1.9923e-02 -2.4149e-02
-4.9487e-02 -1.0510e-02 -4.2107e-02
-5.7684e-03 -4.8632e-03 -1.8332e-02
...
(255,253,.,.) =
7.2013e-03 -1.5208e-02 -1.6507e-02
-8.8276e-03 -1.8698e-02 -1.6637e-03
-1.2015e-02 2.9667e-03 6.2300e-03
(255,254,.,.) =
-1.8341e-02 -9.0521e-03 2.6030e-02
3.5930e-02 5.3049e-02 5.8487e-02
-1.3661e-02 -3.6888e-03 -7.1606e-03
(255,255,.,.) =
-1.2594e-02 -4.0898e-02 1.7162e-03
-1.7420e-02 -4.3435e-02 -1.3183e-02
-3.7506e-02 -5.5707e-02 -3.0051e-02
[torch.FloatTensor of size 256x256x3x3]
), ('layer3.1.bn2.weight',
0.1971
0.1771
0.1303
0.1995
0.1839
0.0934
0.2333
0.2236
0.1654
0.1280
0.0842
0.1085
0.3168
0.2032
0.3246
0.2184
0.3208
0.2824
0.3408
0.3339
0.3307
0.5571
0.2821
0.3081
0.2114
0.2971
0.2361
0.5500
0.1221
0.3381
0.1528
0.1544
0.1982
0.0582
0.1812
0.2489
0.1954
0.0705
0.0918
0.1328
0.2616
0.2013
0.0720
0.1573
0.1919
0.0813
0.1170
0.2504
0.2863
0.3032
0.1476
0.3696
0.1870
0.2097
0.1907
0.2364
0.1642
0.1079
0.2531
0.1703
0.1266
0.0814
0.2407
0.2609
0.2705
0.2128
0.5007
0.2375
0.0802
0.2896
0.1776
0.0887
0.1094
0.1834
0.2812
0.1971
0.2021
0.3443
0.1411
0.1362
0.2676
0.1618
0.2723
0.2727
0.2528
0.0982
0.4707
0.2239
0.3649
0.1987
0.0815
0.2543
0.3322
0.1561
0.2336
0.1294
0.2570
0.1700
0.1374
0.2215
0.5015
0.3132
0.1487
0.1174
0.0916
0.2130
0.1393
0.3057
0.5634
0.1018
0.0994
0.0492
0.4427
0.3142
0.4002
0.1334
0.2174
0.5522
0.2806
0.2784
0.4333
0.2602
0.3788
0.1827
0.2664
0.1077
0.3001
0.2428
0.5130
0.0829
0.1254
0.1996
0.1451
0.2253
0.1467
0.3712
0.0794
0.5425
0.2058
0.2103
0.1288
0.4993
0.1815
0.1845
0.4154
0.3817
0.2054
0.2205
0.1471
0.4964
0.4202
0.0801
0.0623
0.3536
0.2760
0.3840
0.1632
0.1402
0.2674
0.0844
0.2305
0.2259
0.2146
0.4181
0.2821
0.2926
0.3416
0.4640
0.3025
0.3732
0.5871
0.0616
0.2797
0.3042
0.2173
0.3550
0.2096
0.2449
0.3428
0.2868
0.3543
0.4667
0.3220
0.3805
0.2632
0.2160
0.1924
0.4074
0.4966
0.3623
0.1670
0.1321
0.2374
0.2118
0.1522
0.1668
0.3836
0.0983
0.3729
0.3943
0.4353
0.2270
0.1508
0.3133
0.3850
0.5774
0.1892
0.2822
0.0907
0.2364
0.0964
0.2360
0.0699
0.2938
0.5100
0.3348
0.2339
0.1145
0.2155
0.2266
0.2829
0.2341
0.1891
0.2906
0.2681
0.3876
0.3915
0.1844
0.1889
0.4405
0.1405
0.3460
0.2724
0.2567
0.2785
0.1148
0.1607
0.1754
0.0883
0.1649
0.1268
0.2356
0.2811
0.0766
0.1424
0.1683
0.3979
0.2685
0.6383
0.1087
0.3180
0.1760
0.3634
0.2615
0.1999
0.2541
[torch.FloatTensor of size 256]
), ('layer3.1.bn2.bias',
-0.0162
-0.2033
0.0294
-0.1697
-0.1840
-0.0309
-0.2039
-0.1426
-0.0443
-0.0886
-0.0647
-0.0968
-0.0380
-0.2073
-0.3061
0.1443
-0.3079
-0.1232
-0.1627
-0.0980
-0.2471
-0.2837
-0.1201
-0.2893
-0.2303
-0.3562
-0.0825
-0.3483
0.0707
-0.1321
-0.1074
-0.1451
0.0235
0.0225
-0.1885
-0.2507
-0.2461
0.0631
-0.0023
-0.1209
-0.2581
-0.1640
-0.0172
-0.1143
-0.2096
-0.0158
0.0128
-0.1332
-0.3139
-0.2294
-0.1527
-0.3503
0.2086
0.0785
-0.1597
-0.1990
0.0346
0.0388
-0.1269
0.1019
0.0981
-0.0390
-0.2537
-0.1356
-0.1796
-0.2422
-0.4517
-0.3124
-0.0177
-0.2615
-0.1567
0.0212
-0.0753
-0.1426
-0.2788
0.0062
-0.1895
-0.2327
-0.1298
-0.1200
-0.1917
-0.0987
-0.1916
-0.1666
-0.2729
0.1287
-0.4620
-0.2259
-0.2270
0.1939
0.0230
-0.3303
-0.3202
-0.1292
-0.0716
0.0048
-0.2579
-0.0116
-0.0557
-0.1229
-0.4804
-0.2351
-0.1367
-0.0578
-0.0537
-0.2743
-0.0827
-0.1922
-0.3481
-0.0358
-0.1094
0.0138
-0.1888
-0.2592
-0.3293
-0.0820
-0.1839
-0.1636
-0.3163
-0.0246
-0.1667
-0.1653
-0.3076
-0.2229
-0.1834
-0.0536
-0.0621
-0.1752
-0.5243
-0.1933
-0.1119
-0.2283
-0.0437
-0.1777
-0.1300
-0.2519
-0.0456
-0.6305
-0.1364
-0.2138
0.0406
-0.5287
-0.2014
-0.1442
-0.1930
-0.3033
0.1030
-0.1499
-0.2297
-0.5301
-0.2543
-0.0417
0.0429
-0.3218
-0.1611
-0.2562
-0.1187
-0.1001
0.0225
0.0996
-0.2138
-0.2019
0.0808
-0.0121
-0.2364
-0.3247
-0.1482
-0.4846
-0.3449
-0.1365
-0.6664
0.0418
-0.2807
-0.0961
-0.2378
-0.1834
-0.1890
-0.0377
-0.3056
-0.1843
-0.1357
-0.3038
-0.2680
-0.4143
-0.2633
-0.1750
-0.1856
-0.2405
-0.1082
-0.2250
-0.1268
-0.1094
0.0594
-0.1419
-0.1178
-0.1602
-0.0328
-0.0194
-0.1985
0.0470
-0.1887
-0.2776
-0.0930
-0.4092
-0.3378
-0.7252
0.0260
-0.1829
0.0561
-0.2227
-0.0026
-0.3218
-0.0093
-0.2843
-0.5121
-0.2337
-0.0836
-0.0818
-0.1296
-0.2090
0.0169
-0.1899
-0.1892
-0.3075
-0.3108
-0.2986
-0.4712
-0.1823
-0.1893
-0.3131
-0.0876
-0.1166
-0.2995
-0.0831
-0.3427
-0.0772
-0.1460
-0.1611
0.0203
-0.0627
-0.0610
-0.2574
-0.1383
0.0470
-0.0302
-0.1638
-0.3323
-0.1741
-0.6307
-0.0772
-0.2123
-0.1559
-0.0459
-0.2416
-0.0143
-0.2079
[torch.FloatTensor of size 256]
), ('layer3.1.bn2.running_mean',
-0.0823
-0.0332
-0.0266
-0.0132
-0.0638
-0.1047
-0.0671
-0.0530
0.0623
-0.0001
-0.0130
0.0178
-0.0493
-0.0653
-0.0985
0.2112
-0.0922
-0.1139
-0.1879
-0.0822
-0.0585
-0.5599
-0.0512
-0.1987
-0.0715
-0.0665
-0.1684
-0.1031
0.1054
-0.2306
-0.0339
-0.0807
-0.0035
0.0472
-0.0788
-0.1265
-0.0615
0.0643
0.0278
0.0031
-0.0767
-0.0951
0.0001
-0.0472
-0.0309
-0.0464
-0.0633
-0.0814
-0.0705
-0.0671
-0.0792
-0.1778
0.0551
0.4111
0.0016
-0.0799
-0.0692
-0.0498
0.0038
0.0025
0.0356
-0.0258
-0.1225
-0.0464
-0.1216
-0.0643
-0.1383
-0.0232
-0.0181
-0.1470
0.0488
-0.0557
-0.0348
-0.0566
-0.0651
-0.1047
-0.0526
-0.1851
-0.1060
0.0265
0.0717
-0.0200
-0.1123
-0.2006
-0.1032
0.0041
-0.2242
-0.1273
-0.0887
0.1082
-0.0589
-0.0834
-0.1046
0.0230
-0.0885
0.0335
-0.1107
-0.0092
-0.0460
-0.1370
-0.2022
0.0463
-0.0197
-0.0119
-0.0600
-0.0751
-0.0727
-0.0911
-0.0921
-0.0580
-0.0364
-0.0447
-0.1709
-0.1368
-0.1476
-0.1476
-0.1123
-0.2308
-0.0906
-0.2593
-0.2083
-0.0634
-0.2680
-0.1612
-0.1003
-0.1318
-0.0904
-0.0967
-0.1973
0.0535
-0.0278
-0.0744
-0.0563
-0.0591
-0.0241
-0.1510
0.0022
-0.1889
-0.1289
-0.0196
-0.0728
-0.1840
-0.0609
-0.0200
-0.4031
-0.0734
-0.0960
-0.0865
-0.0683
-0.1690
-0.0987
0.0124
-0.0443
-0.1937
-0.1329
-0.1207
-0.1423
0.1048
-0.0854
0.0638
-0.0578
-0.0865
0.0336
-0.3877
-0.0574
-0.0753
-0.2072
-0.1827
-0.1238
-0.2206
-0.1645
-0.0293
-0.1309
-0.1585
-0.0439
-0.1387
-0.0617
-0.0198
-0.1220
-0.0763
-0.3562
0.3413
-0.0658
-0.1264
-0.1754
0.0417
-0.0362
-0.1289
-0.2704
-0.2141
0.0194
0.0131
-0.0095
-0.0603
-0.0342
-0.0799
-0.3172
-0.0341
-0.1697
-0.1221
-0.3093
-0.1225
-0.0283
-0.1518
-0.0933
-0.2834
-0.0030
-0.0359
0.0030
-0.0460
-0.0855
-0.0382
-0.0174
-0.0826
-0.1509
-0.1769
-0.1501
0.0015
-0.0877
-0.0333
-0.2545
-0.0130
-0.0469
-0.0884
-0.0899
-0.1911
-0.1313
-0.0350
-0.1149
-0.1889
-0.0409
-0.3819
-0.0897
-0.1702
-0.1546
0.0307
-0.0409
-0.0370
-0.0691
-0.0420
0.0535
-0.1312
-0.1199
-0.0578
-0.0546
-0.1234
-0.1162
-0.1232
-0.2521
-0.0410
-0.1056
-0.0522
-0.2415
-0.0947
-0.1564
-0.1230
[torch.FloatTensor of size 256]
), ('layer3.1.bn2.running_var',
1.00000e-02 *
1.8530
2.6394
1.2856
1.2689
1.4751
1.5978
1.2460
1.8145
1.9383
1.1628
0.7526
1.1219
3.4934
1.7682
2.0756
2.5356
2.3623
1.9827
2.2904
3.4381
1.9890
5.3809
3.1563
1.9644
1.2794
1.9174
1.9996
2.8697
1.8158
2.7271
1.1315
1.1837
2.1068
0.8485
1.6900
1.5894
1.2585
1.5643
0.9987
0.8934
1.8547
1.5648
0.8220
1.3122
1.1694
1.0160
1.0536
2.4400
1.3983
1.6112
1.1215
1.6260
2.9490
3.5517
1.4957
2.0074
1.7436
1.1895
2.0108
3.5077
1.8778
1.2123
1.3372
2.3718
2.4176
1.7590
2.8278
1.4679
1.0504
1.6922
2.4693
1.8485
1.0362
1.6711
1.3731
2.2465
1.1501
2.4804
1.2025
1.2570
2.1939
1.6975
1.7424
2.1701
1.7643
1.9817
2.3719
1.2252
2.7208
6.0497
1.1410
1.2758
1.9986
1.9867
2.2465
1.9493
1.6528
2.3030
1.8762
2.0904
2.1493
2.4125
1.2632
1.4381
1.2692
0.9812
1.6465
1.6313
4.0332
1.5335
1.4138
0.5016
3.3883
1.5893
2.2616
1.2452
1.7113
4.5386
1.6545
3.1452
4.1772
2.2630
2.4911
1.6146
1.9939
1.0476
2.7228
1.4376
2.7456
1.1228
0.9946
0.9961
2.0141
1.8478
0.9278
2.0743
0.8191
2.8920
1.5118
1.3835
1.6313
2.1079
1.2747
1.3377
4.0637
2.5386
2.1835
1.5742
0.8617
1.8719
3.4042
0.9114
0.6896
2.1932
2.0435
3.3022
2.0678
1.2721
4.0421
1.4042
1.4963
1.6277
4.7124
5.3335
1.4413
1.6945
2.4771
2.2726
1.6246
3.9784
2.5942
0.6863
1.6802
2.0793
1.8885
2.3745
1.5412
2.2586
1.8481
2.0519
2.9992
5.0411
1.8868
1.7426
1.4016
1.8388
2.3564
4.0373
6.9144
2.7295
2.0721
1.6795
2.3665
1.8622
1.4306
1.1317
4.2427
1.4678
2.1902
4.3384
4.4179
1.1020
1.7952
1.4674
1.5806
2.5305
2.3667
2.1030
1.1405
1.9686
1.1575
1.3539
0.9093
2.0994
2.3689
2.5597
3.3564
1.6578
1.5100
1.3169
2.9548
2.6140
1.6542
1.3860
1.1822
2.0687
1.8049
0.9810
1.6161
3.9758
1.1737
3.2446
1.3650
2.2535
1.7262
1.2099
0.9317
1.1607
1.1387
1.9526
1.4283
1.0255
2.0608
1.1115
1.6965
1.3392
2.2404
2.1118
3.9554
0.7481
2.4169
1.2689
3.6180
1.6522
1.8249
1.6171
[torch.FloatTensor of size 256]
), ('layer4.0.conv1.weight',
( 0 , 0 ,.,.) =
-1.1645e-02 -1.9010e-02 -2.1876e-02
2.0482e-02 2.3962e-02 2.9161e-02
4.3672e-02 3.3278e-02 4.9908e-02
( 0 , 1 ,.,.) =
-7.4040e-03 2.8083e-03 -4.7339e-03
6.9030e-03 1.4271e-02 -3.6954e-03
-3.1341e-03 1.3736e-02 1.6127e-03
( 0 , 2 ,.,.) =
1.8676e-02 -1.0553e-02 -1.4233e-02
8.9944e-03 -2.5068e-03 -1.2145e-02
-4.9455e-03 -2.9206e-02 -9.6385e-03
...
( 0 ,253,.,.) =
-1.2655e-02 1.7691e-02 9.8264e-04
7.4271e-03 7.6115e-03 1.1135e-02
2.3242e-02 1.1058e-02 4.0498e-03
( 0 ,254,.,.) =
1.8557e-02 1.2472e-02 1.7220e-02
-4.8544e-03 8.3627e-03 2.2811e-02
-5.1675e-03 2.3264e-02 3.4068e-02
( 0 ,255,.,.) =
2.4934e-02 2.2373e-02 4.2614e-02
1.3486e-02 1.6760e-03 1.3019e-02
-6.2821e-03 -1.5112e-03 -8.9229e-03
⋮
( 1 , 0 ,.,.) =
-9.8089e-04 -6.3011e-03 5.9932e-03
1.5936e-02 1.3394e-02 2.9934e-02
2.3149e-02 2.0709e-02 2.5485e-02
( 1 , 1 ,.,.) =
-2.0015e-02 -3.3349e-02 -8.0396e-03
-7.2800e-03 -1.2187e-02 -2.0389e-04
-1.3138e-02 -2.0427e-02 -1.6286e-02
( 1 , 2 ,.,.) =
-6.7681e-03 5.0045e-03 -2.6683e-03
-2.1073e-02 2.8275e-04 -1.8205e-02
-1.7382e-02 -5.0244e-03 -3.0386e-03
...
( 1 ,253,.,.) =
-1.1035e-02 -2.2964e-02 -1.1028e-02
-6.3256e-03 -4.1667e-03 -1.7323e-02
-1.3611e-02 -2.3468e-02 -1.6436e-02
( 1 ,254,.,.) =
7.3663e-03 6.6219e-03 5.2776e-03
-3.5464e-03 3.2750e-03 -9.1126e-03
3.5593e-04 -1.0151e-02 -1.9123e-02
( 1 ,255,.,.) =
1.8193e-03 8.8087e-03 5.1361e-03
3.1915e-03 2.5287e-02 2.4939e-02
1.3968e-02 1.9613e-02 2.2382e-02
⋮
( 2 , 0 ,.,.) =
-4.1548e-03 8.8964e-03 2.0143e-03
1.1327e-02 1.3251e-02 1.4014e-02
7.2196e-03 1.3045e-02 2.4827e-02
( 2 , 1 ,.,.) =
-1.5025e-02 5.0530e-03 7.4766e-03
-2.4685e-02 -1.6732e-02 -1.0888e-02
-2.8064e-02 -1.1875e-02 -3.4120e-03
( 2 , 2 ,.,.) =
2.8449e-02 1.4594e-02 6.9441e-03
2.4799e-02 1.9453e-02 1.1294e-02
-1.0787e-02 -2.1006e-02 -1.0372e-02
...
( 2 ,253,.,.) =
1.4967e-02 8.2449e-03 2.0244e-03
1.4287e-02 -6.3867e-03 -8.0757e-03
2.7547e-02 1.0791e-02 1.6567e-02
( 2 ,254,.,.) =
3.6191e-02 3.8918e-02 3.9028e-02
-8.3489e-04 1.3273e-02 2.0172e-02
-2.0652e-02 -5.4010e-03 1.7147e-03
( 2 ,255,.,.) =
2.0373e-04 3.5919e-03 8.5592e-03
6.2363e-03 -9.3086e-05 1.2940e-02
1.3152e-02 1.0732e-02 1.9896e-02
...
⋮
(509, 0 ,.,.) =
-1.7400e-02 -6.7019e-03 -9.1787e-03
-9.9672e-03 2.6298e-04 3.3439e-03
1.5721e-02 1.4216e-02 2.0509e-02
(509, 1 ,.,.) =
2.1410e-02 3.6914e-02 2.8239e-02
3.8158e-02 4.8944e-02 3.4652e-02
3.1723e-02 4.4208e-02 4.0035e-02
(509, 2 ,.,.) =
-3.3437e-03 -1.0482e-02 -5.3990e-03
-5.3186e-03 1.1394e-02 1.7593e-03
-5.6652e-03 -6.6373e-03 -1.3492e-02
...
(509,253,.,.) =
-1.7099e-02 -1.8145e-03 -1.3040e-02
-2.2750e-02 -3.6062e-03 -8.0294e-03
-1.6087e-02 -1.0175e-02 -1.3529e-02
(509,254,.,.) =
4.1701e-04 -5.1785e-03 -2.1884e-02
2.6919e-03 8.9139e-03 -1.4217e-04
-7.3746e-03 -6.6853e-03 -2.3725e-02
(509,255,.,.) =
1.9425e-02 1.3175e-02 1.7511e-02
1.8235e-02 4.4286e-02 2.3767e-02
2.6504e-02 3.3104e-02 1.9696e-02
⋮
(510, 0 ,.,.) =
-1.0177e-02 -1.0701e-02 -2.0428e-02
-1.7986e-02 5.9928e-03 -1.0584e-03
-1.8794e-02 -1.8773e-03 -6.9449e-03
(510, 1 ,.,.) =
-2.8498e-03 1.6427e-03 1.4575e-04
-5.4403e-03 8.3667e-03 -9.4164e-03
-4.4999e-03 5.4902e-03 2.4863e-03
(510, 2 ,.,.) =
-1.3356e-02 -2.1525e-02 5.3421e-04
-1.9160e-02 -2.4645e-02 -1.3791e-02
-6.1991e-03 -1.3174e-02 -3.6783e-03
...
(510,253,.,.) =
-3.3993e-03 -2.7823e-03 7.6715e-03
-2.0649e-02 -1.2731e-02 -9.4138e-03
-1.3678e-03 -3.4410e-02 -2.6984e-02
(510,254,.,.) =
-3.5651e-04 2.0102e-03 1.4130e-02
-1.3073e-02 -1.6616e-02 -1.2690e-02
-3.5934e-02 -4.1700e-02 -3.3968e-02
(510,255,.,.) =
2.0470e-02 8.0159e-04 -1.1607e-03
9.5101e-03 3.0336e-02 2.7362e-02
1.5588e-02 3.2851e-02 1.3015e-02
⋮
(511, 0 ,.,.) =
-1.5574e-02 -3.2971e-02 -3.1939e-02
-2.2502e-02 -5.7187e-03 -5.6729e-03
-2.7309e-02 -1.6981e-02 1.2832e-04
(511, 1 ,.,.) =
-1.1925e-02 -2.9479e-02 -2.0437e-02
-2.4408e-02 -2.2069e-02 -1.9965e-03
-2.3279e-02 -5.5140e-03 2.5630e-02
(511, 2 ,.,.) =
-1.6100e-02 -8.2417e-03 1.5266e-04
-2.6195e-03 -8.2754e-03 -2.9435e-02
-2.7493e-03 -2.4889e-02 -2.3583e-02
...
(511,253,.,.) =
1.7985e-02 1.8594e-02 8.9198e-04
-1.7319e-02 7.8735e-03 -2.8659e-03
3.8596e-03 2.9061e-02 2.4188e-02
(511,254,.,.) =
-2.6735e-02 -1.4391e-02 -4.0148e-02
-2.6728e-02 -2.4455e-02 -6.9176e-03
-5.7244e-02 -2.1995e-04 5.5438e-02
(511,255,.,.) =
2.3487e-02 2.7157e-03 -8.4719e-04
1.7886e-02 5.4860e-03 2.8059e-02
4.6468e-03 1.8598e-02 1.3761e-03
[torch.FloatTensor of size 512x256x3x3]
), ('layer4.0.bn1.weight',
0.2427
0.2232
0.2511
0.2288
0.2074
0.2905
0.2482
0.3102
0.2749
0.2892
0.2448
0.1759
0.2426
0.2780
0.2315
0.2631
0.3383
0.2785
0.2536
0.2989
0.2335
0.2812
0.3486
0.2778
0.2280
0.2547
0.3032
0.2468
0.2512
0.2973
0.2577
0.3200
0.2385
0.2714
0.2532
0.2625
0.3344
0.2626
0.1838
0.2839
0.2187
0.2666
0.2858
0.2471
0.2915
0.2332
0.2637
0.2691
0.2432
0.2384
0.2356
0.2525
0.2564
0.2451
0.2529
0.2522
0.2800
0.3165
0.2340
0.2634
0.2569
0.1942
0.2621
0.2205
0.2301
0.2323
0.2811
0.1897
0.2280
0.3472
0.2717
0.3191
0.2440
0.2719
0.2781
0.2262
0.3444
0.2648
0.2725
0.2851
0.2039
0.2935
0.2742
0.2774
0.2654
0.2430
0.2721
0.2708
0.3085
0.2895
0.2596
0.2147
0.3119
0.3449
0.2262
0.2814
0.2326
0.2712
0.2637
0.2323
0.3333
0.2714
0.2991
0.2747
0.2515
0.2394
0.2709
0.2836
0.2866
0.2408
0.2560
0.2048
0.2394
0.2813
0.3267
0.2761
0.2123
0.2715
0.2540
0.2771
0.3209
0.1905
0.3989
0.2676
0.2357
0.2169
0.3216
0.3596
0.2838
0.2648
0.2702
0.2469
0.2442
0.2553
0.2599
0.2693
0.2399
0.2700
0.2063
0.2711
0.2834
0.2781
0.2529
0.2013
0.2343
0.2082
0.3063
0.1635
0.2673
0.2197
0.2787
0.2724
0.2744
0.2287
0.2969
0.2662
0.2982
0.2396
0.3039
0.2319
0.2773
0.2661
0.2898
0.2489
0.3060
0.2612
0.2937
0.3045
0.2999
0.2580
0.2093
0.2714
0.2993
0.2679
0.2963
0.2754
0.2580
0.2566
0.2634
0.2325
0.2442
0.2934
0.2398
0.2631
0.2851
0.2870
0.2239
0.2410
0.2676
0.2681
0.2638
0.2732
0.2812
0.2203
0.2670
0.2764
0.2550
0.3160
0.2888
0.2615
0.2178
0.2485
0.2414
0.2798
0.2872
0.2767
0.2551
0.2429
0.2459
0.3288
0.3024
0.2912
0.2625
0.3019
0.2643
0.2721
0.2108
0.2368
0.2269
0.1988
0.2830
0.2569
0.2349
0.2755
0.2442
0.2717
0.2747
0.2785
0.2516
0.2227
0.2783
0.2465
0.2652
0.2641
0.2960
0.2671
0.2679
0.2537
0.2847
0.2507
0.2525
0.2024
0.2311
0.2618
0.2764
0.3031
0.2452
0.2716
0.2273
0.2295
0.2611
0.2329
0.2690
0.2753
0.2737
0.2590
0.2421
0.2685
0.3392
0.3073
0.1371
0.3650
0.2980
0.2460
0.2487
0.2912
0.2704
0.2560
0.2213
0.2569
0.2661
0.2367
0.2742
0.2847
0.3055
0.2671
0.2819
0.2791
0.2401
0.2549
0.2210
0.3507
0.2852
0.2162
0.2821
0.2369
0.2905
0.2826
0.2300
0.2745
0.2437
0.2522
0.2489
0.2395
0.2851
0.2887
0.2621
0.2500
0.2689
0.2427
0.3010
0.3067
0.2861
0.2387
0.2462
0.2859
0.2550
0.2630
0.2442
0.2145
0.2898
0.2282
0.2327
0.2242
0.2738
0.2485
0.2379
0.3058
0.2798
0.2761
0.2252
0.2866
0.2660
0.3250
0.2612
0.2767
0.3205
0.2932
0.3183
0.2939
0.3103
0.2553
0.2981
0.3667
0.3086
0.2254
0.2352
0.2348
0.2555
0.2597
0.2369
0.3017
0.2776
0.2728
0.3174
0.2785
0.2721
0.2637
0.2702
0.3633
0.2869
0.2675
0.3405
0.2587
0.2732
0.2747
0.2821
0.2750
0.2630
0.2018
0.2358
0.3034
0.3155
0.3013
0.2775
0.2511
0.2945
0.1605
0.2825
0.2964
0.2194
0.2061
0.2332
0.2348
0.2663
0.2543
0.2927
0.2215
0.2521
0.2827
0.1993
0.2453
0.2597
0.2654
0.2757
0.2650
0.2444
0.2949
0.2308
0.3071
0.1904
0.3024
0.2786
0.3659
0.2966
0.2746
0.2449
0.2201
0.2564
0.2853
0.2392
0.2457
0.2467
0.2374
0.2664
0.2460
0.3182
0.1793
0.2379
0.2596
0.2847
0.2452
0.1974
0.2388
0.2949
0.2879
0.2786
0.2765
0.3296
0.2530
0.2690
0.2547
0.2333
0.2348
0.2690
0.2718
0.2679
0.2516
0.2710
0.2366
0.2601
0.2764
0.2880
0.2008
0.2637
0.2263
0.2511
0.2604
0.2805
0.2989
0.2965
0.2597
0.2767
0.2553
0.2959
0.2512
0.2925
0.3008
0.2423
0.2394
0.2708
0.3704
0.2879
0.2532
0.2248
0.2023
0.2279
0.2366
0.3082
0.2980
0.2909
0.2777
0.4293
0.2658
0.2940
0.2418
0.2816
0.3247
0.2647
0.2216
0.2758
0.2421
0.2078
0.2332
0.2271
0.2611
0.3650
0.2017
0.2598
0.2160
0.2641
0.1408
0.2664
0.2502
0.2553
0.2227
0.2417
0.2696
0.2388
0.2833
0.2333
0.2667
0.2224
0.2691
0.2710
0.2459
0.2674
0.2430
0.2593
0.1851
0.2950
0.3664
0.2212
0.3026
0.1840
0.3443
0.2140
0.3717
0.2360
0.3081
0.2638
0.2233
[torch.FloatTensor of size 512]
), ('layer4.0.bn1.bias',
-0.1986
-0.1593
-0.2054
-0.1598
-0.1268
-0.3226
-0.1597
-0.3477
-0.2497
-0.2730
-0.2319
-0.0286
-0.1899
-0.2813
-0.1733
-0.2412
-0.3712
-0.2747
-0.2053
-0.2585
-0.1535
-0.2748
-0.3241
-0.2525
-0.1906
-0.2252
-0.3436
-0.2202
-0.1664
-0.2716
-0.1920
-0.3399
-0.2026
-0.2972
-0.2616
-0.2238
-0.2486
-0.2606
-0.0893
-0.3572
-0.1283
-0.2583
-0.2450
-0.1523
-0.3165
-0.1445
-0.2522
-0.1963
-0.1794
-0.1071
-0.1662
-0.2053
-0.2530
-0.1447
-0.2517
-0.2062
-0.2817
-0.3376
-0.1382
-0.2389
-0.2557
-0.0156
-0.2169
-0.1763
-0.1486
-0.2122
-0.2002
-0.0716
-0.2089
-0.3580
-0.2588
-0.3599
-0.1528
-0.2107
-0.2925
-0.1855
-0.3970
-0.1257
-0.2574
-0.2412
-0.0863
-0.3065
-0.2701
-0.3380
-0.2485
-0.1935
-0.2987
-0.2279
-0.3600
-0.2764
-0.2480
-0.1208
-0.3378
-0.2661
-0.1677
-0.2470
-0.2152
-0.2591
-0.1936
-0.1543
-0.4117
-0.1570
-0.2372
-0.2997
-0.2124
-0.2034
-0.1848
-0.3070
-0.3438
-0.1839
-0.1937
-0.0916
-0.2338
-0.3558
-0.1967
-0.3303
-0.1398
-0.2177
-0.1665
-0.1857
-0.3115
-0.1049
-0.4229
-0.2408
-0.1320
-0.1631
-0.3378
-0.3300
-0.3183
-0.2268
-0.2787
-0.1950
-0.1950
-0.1463
-0.2437
-0.2297
-0.1282
-0.2164
-0.1179
-0.2437
-0.2611
-0.2656
-0.1948
-0.1208
-0.1668
-0.1351
-0.2713
-0.0560
-0.2243
-0.1318
-0.2356
-0.2720
-0.2051
-0.1736
-0.2891
-0.2627
-0.3358
-0.1779
-0.2309
-0.1477
-0.2685
-0.1882
-0.2629
-0.1983
-0.3522
-0.1905
-0.2778
-0.3395
-0.2895
-0.2240
-0.1150
-0.2462
-0.2426
-0.2581
-0.3133
-0.2315
-0.2271
-0.2077
-0.2109
-0.1371
-0.1323
-0.2529
-0.1716
-0.2532
-0.2277
-0.2084
-0.1803
-0.1868
-0.2404
-0.2166
-0.2197
-0.2870
-0.3062
-0.1507
-0.1054
-0.2199
-0.2415
-0.3310
-0.2700
-0.1568
-0.1449
-0.2610
-0.1828
-0.2648
-0.3134
-0.2937
-0.2687
-0.2115
-0.2164
-0.4522
-0.2999
-0.3032
-0.2292
-0.3099
-0.2642
-0.2695
-0.1441
-0.1671
-0.1570
-0.1415
-0.2222
-0.1736
-0.1481
-0.2573
-0.2060
-0.1703
-0.2360
-0.1770
-0.2132
-0.2016
-0.3001
-0.1518
-0.2086
-0.2805
-0.2698
-0.2292
-0.1293
-0.2514
-0.2600
-0.2454
-0.1744
-0.1029
-0.1679
-0.2353
-0.2007
-0.3363
-0.1640
-0.2430
-0.1699
-0.1697
-0.1837
-0.1625
-0.2415
-0.2687
-0.2305
-0.2029
-0.2209
-0.2240
-0.2675
-0.3233
0.1462
-0.4777
-0.2376
-0.1489
-0.1462
-0.3055
-0.2234
-0.1697
-0.1952
-0.2131
-0.2340
-0.2039
-0.3054
-0.2596
-0.3470
-0.2176
-0.2706
-0.2897
-0.1729
-0.2300
-0.1066
-0.3556
-0.2912
-0.1777
-0.2007
-0.1699
-0.3009
-0.3046
-0.1693
-0.2602
-0.2053
-0.1810
-0.1808
-0.1730
-0.3757
-0.1808
-0.1805
-0.1895
-0.2643
-0.2075
-0.2365
-0.1975
-0.3064
-0.1984
-0.1811
-0.3676
-0.1198
-0.1485
-0.1770
-0.0781
-0.2052
-0.1360
-0.1417
-0.1691
-0.2395
-0.1785
-0.1747
-0.2484
-0.2717
-0.3096
-0.1465
-0.2239
-0.2584
-0.3572
-0.2311
-0.2878
-0.3841
-0.3475
-0.3896
-0.1891
-0.2861
-0.2431
-0.2837
-0.4365
-0.3353
-0.1802
-0.1976
-0.1529
-0.1978
-0.2535
-0.1954
-0.2667
-0.2813
-0.2487
-0.3070
-0.2339
-0.2212
-0.1925
-0.2224
-0.4178
-0.3151
-0.2663
-0.3581
-0.1935
-0.2385
-0.2424
-0.1850
-0.2265
-0.1803
-0.0777
-0.1492
-0.3361
-0.4133
-0.3123
-0.2745
-0.1247
-0.3102
0.0041
-0.1981
-0.3301
-0.2047
-0.1053
-0.1653
-0.1634
-0.1116
-0.2314
-0.3191
-0.1818
-0.2657
-0.2220
-0.1029
-0.1999
-0.2702
-0.2139
-0.2256
-0.2653
-0.1630
-0.3322
-0.1617
-0.3446
0.0288
-0.2456
-0.3171
-0.3580
-0.2857
-0.2520
-0.2031
-0.1522
-0.2203
-0.3490
-0.1685
-0.1424
-0.1602
-0.1553
-0.3057
-0.2420
-0.3536
-0.0551
-0.0987
-0.2272
-0.2619
-0.2035
-0.0906
-0.1976
-0.3040
-0.2732
-0.3161
-0.2102
-0.3384
-0.1740
-0.1475
-0.1842
-0.1823
-0.1151
-0.2183
-0.2010
-0.2659
-0.2205
-0.2567
-0.1633
-0.2213
-0.2658
-0.2938
-0.1069
-0.2522
-0.1103
-0.2216
-0.2244
-0.2908
-0.2176
-0.3605
-0.2374
-0.2391
-0.2251
-0.2256
-0.1339
-0.1970
-0.2970
-0.2206
-0.2051
-0.2229
-0.3602
-0.2923
-0.2498
-0.1466
-0.0979
-0.1686
-0.2158
-0.2881
-0.3002
-0.2760
-0.2496
-0.3536
-0.2868
-0.3251
-0.1847
-0.3062
-0.3861
-0.2650
-0.1339
-0.1846
-0.1630
-0.0630
-0.1717
-0.1415
-0.1906
-0.4611
-0.1391
-0.1920
-0.1369
-0.1647
-0.0055
-0.2598
-0.2653
-0.2319
-0.1780
-0.1913
-0.2055
-0.1891
-0.2625
-0.1633
-0.2497
-0.1696
-0.1907
-0.2431
-0.1825
-0.2607
-0.1943
-0.2361
-0.0581
-0.2758
-0.2593
-0.1466
-0.3589
-0.0439
-0.3440
-0.1089
-0.4219
-0.1503
-0.2792
-0.3035
-0.1156
[torch.FloatTensor of size 512]
), ('layer4.0.bn1.running_mean',
-0.1124
-0.1164
-0.1293
-0.4187
-0.3841
-0.4075
-0.5318
-0.1824
-0.7574
-0.8394
-0.1911
-0.2697
-0.4389
-0.2669
-0.4330
-0.4768
-0.4965
-0.4738
-0.1415
-0.4245
-0.3285
-0.5264
-0.8126
-0.4011
-0.3142
-0.4584
-0.1663
-0.4856
-0.3431
-0.5183
-0.4328
-0.6726
-0.4332
-0.4050
-0.1831
-0.4535
-1.0135
-0.0091
-0.4773
-0.3841
-0.5013
-0.7427
-0.4992
-0.5870
-0.3608
-0.4751
-0.6086
-0.3768
-0.6925
-1.2176
-0.5161
-0.4123
-0.3214
-0.1537
-0.3330
-0.3304
-0.4375
-0.5819
-0.4770
-0.5944
-0.2509
0.2625
0.1042
-0.3361
-0.4330
-0.4002
-0.3678
-0.3667
-0.2196
-0.6465
-0.5887
-0.3854
-0.3306
-0.3786
-0.2318
-0.0974
-1.0318
-0.8801
-0.3272
-0.4941
-0.6038
-0.4083
-0.1259
-0.1156
-0.1786
-0.5553
-0.7105
-0.2667
-0.1680
-0.0074
-0.2463
-0.3361
0.1572
-0.6019
-0.4686
-0.3578
-0.5812
-0.2113
-0.3591
-0.5293
-0.7721
-0.5846
-0.1129
-0.4135
-0.4965
-0.6759
-0.4077
-0.4894
-0.3329
-0.3689
-0.0139
-0.1107
-0.3289
0.0494
-0.1049
0.0325
-0.2145
-0.0585
-0.3660
-0.2958
-0.0878
-0.6473
-0.8958
-0.5207
-0.4756
-0.3351
-0.5421
0.0924
-0.9209
0.0610
0.0737
-0.3680
-0.7011
-0.5918
-0.5081
-0.4591
-0.5154
-0.3190
-0.6232
-0.5512
-0.4814
-0.4307
-0.2918
-0.2123
-0.2189
-0.4028
-0.1570
-0.1099
-0.3914
-0.3886
-0.1502
-0.4527
-0.1671
-0.2021
-0.5321
-0.2644
-0.5207
-0.5534
-0.5519
-0.3069
-0.2326
-0.5709
-0.6164
-0.0115
-0.6641
-0.5729
-0.2750
-0.5720
-0.7684
-0.4361
-0.3526
-0.0426
-0.1350
-0.8835
-0.3217
-0.1706
-0.4284
-0.4497
-0.7264
-0.9589
-0.3439
-0.6800
-0.4520
-0.5459
-0.2993
-0.4854
-0.1415
-0.0290
-0.2729
-0.1666
-0.2346
-0.5397
-0.4724
-0.5606
-0.5647
-0.3614
-0.5158
-0.2728
-0.0432
-0.9420
-0.5284
-0.6236
-0.3835
-0.6825
-0.5347
-0.4121
-0.2656
-0.5761
-0.3379
-0.7679
-0.8335
-0.5631
-0.3712
-0.0170
-0.5099
-0.5196
-0.2617
-0.5632
-0.6310
-0.5244
-0.2192
-0.4241
-0.2130
-0.2760
-0.1772
-0.5719
-0.4033
-0.7874
-0.3226
-0.2671
-0.3425
-0.7110
-0.4422
-0.1318
-0.3841
-0.4050
-0.5102
-0.4865
-0.5415
-0.4790
-0.4867
-0.2482
-0.5347
0.1373
-0.9281
-0.3791
-0.0393
-0.6500
-0.0687
-0.2550
-0.7833
-0.1906
0.0692
-0.5203
0.1102
-0.4691
-0.2165
-0.4058
-0.5252
-0.5489
-0.2243
-0.8912
-0.5753
-0.3787
-0.4660
-0.4167
-0.7948
-0.2214
0.2169
-0.3230
-0.5716
-0.4523
-0.2235
-0.5354
-0.6187
-0.6403
-0.5779
-0.6974
-0.4531
-0.4559
-0.6742
-0.8658
-0.6413
-0.3098
0.4122
-0.4813
-0.5268
0.1341
-0.1123
-0.3868
-0.6683
-0.4020
-0.4705
-0.5263
-0.4912
-0.4345
0.0675
-0.7317
-0.3467
-0.4757
-0.4845
-0.1666
-0.5546
-0.2875
-0.5574
-0.2929
-0.9178
0.0932
-0.3473
-0.2659
-0.8700
-0.4143
-0.6691
-0.3896
-0.3993
-0.3583
-0.9644
-0.5416
-0.3117
0.1785
-0.4971
-0.8436
-0.6282
-0.5113
-0.0999
-0.3834
-0.4330
-0.4084
-0.4269
-0.5670
-0.5599
0.2002
-0.3582
-0.7621
-0.4257
-0.4749
-0.2672
-0.4449
-0.4631
-0.5055
-0.3216
-0.5426
-0.2615
-0.5695
-0.2981
-0.8440
-0.6237
-0.6642
-0.4691
-0.9326
-0.6129
0.0988
-0.8381
-0.2735
-0.2299
-0.5881
-0.2101
-0.0520
-0.8218
-0.8467
-0.1617
-0.2244
-0.4366
-0.1205
-0.5751
-0.6796
-0.4050
-0.0679
-0.8405
-0.4547
-0.1708
-0.0480
-0.1587
-0.3734
-0.8896
0.0825
-0.7593
-0.4594
-0.2676
-0.1145
-0.3023
-0.2456
-0.3645
-0.3545
-0.8241
-0.1730
-0.2575
-0.0103
-0.3935
-0.7034
-0.2919
-0.2793
-0.3966
-0.7128
-0.5211
-0.7188
-0.4073
-0.2814
-0.2293
-0.4529
-0.6779
-0.0934
-0.3272
-0.3638
-0.2048
-0.2720
-0.3683
-0.3334
-0.6409
-0.2807
-0.4246
-0.0683
-0.3437
-0.1466
-0.6256
-0.4319
-0.1858
-0.1817
-0.7679
-0.3353
-0.9588
-0.1952
-0.2006
-0.1280
-0.4047
-0.2239
-0.6247
-0.3422
-0.5595
-0.7026
0.0354
-0.5814
-0.7382
-0.3904
-0.3409
-0.8630
-0.3453
-0.1569
-0.6717
-0.5216
-0.2329
-0.3564
-0.7584
-0.0124
-0.5398
-0.7708
-0.3359
-0.2909
-0.3133
-0.3400
-0.5624
-0.5493
-0.4637
-0.4022
-0.3580
-0.3325
-0.3384
-0.2277
-0.2697
-0.2907
-0.2164
-0.2489
-0.0573
-0.3735
-0.3996
-0.3451
-0.4648
-0.7143
-0.2062
-0.2513
-0.4464
-0.2624
-0.1615
-0.3099
-0.7480
-0.7751
-0.3383
-0.2875
-0.5976
-0.3752
-0.9447
0.9778
-0.5259
-0.0119
-0.3122
-0.3802
-0.7690
-0.3534
-0.3268
-0.3882
-0.4871
-0.4404
-0.6773
-0.4915
-0.4891
-0.3313
-0.6497
-0.5303
-0.6193
-0.8063
-0.4356
-0.0466
-0.6772
-0.7360
-0.6388
-0.4199
-0.4575
-0.5776
-0.5648
-0.2510
-0.2753
-0.4860
[torch.FloatTensor of size 512]
), ('layer4.0.bn1.running_var',
0.1030
0.0999
0.1279
0.0991
0.1004
0.1214
0.1621
0.1000
0.1600
0.1227
0.1053
0.1421
0.1020
0.1322
0.1003
0.1064
0.1366
0.1246
0.1300
0.1984
0.1163
0.1496
0.1630
0.1266
0.1066
0.1212
0.1068
0.1123
0.1615
0.1656
0.1303
0.1438
0.1148
0.1237
0.1166
0.1157
0.1930
0.1042
0.0904
0.0956
0.1063
0.0841
0.1163
0.1349
0.1176
0.1648
0.1115
0.1222
0.1269
0.1880
0.1090
0.1280
0.1211
0.1447
0.0857
0.1207
0.1129
0.1204
0.1305
0.1179
0.1167
0.2456
0.2022
0.0975
0.1263
0.1045
0.2082
0.0831
0.1038
0.1231
0.1124
0.1105
0.1467
0.1336
0.1288
0.1107
0.1366
0.1815
0.1282
0.1306
0.1200
0.1056
0.1189
0.1204
0.1113
0.0986
0.1033
0.1220
0.1178
0.1497
0.1115
0.1130
0.1207
0.1372
0.0990
0.1461
0.0985
0.1184
0.1507
0.1284
0.1085
0.1406
0.1754
0.1095
0.0977
0.0941
0.1799
0.1073
0.0796
0.1153
0.1189
0.1452
0.1099
0.1361
0.2592
0.0810
0.1098
0.1573
0.1272
0.1837
0.1360
0.0959
0.1403
0.1303
0.1342
0.0965
0.1285
0.2215
0.1247
0.1188
0.1204
0.1163
0.1006
0.1711
0.1036
0.1211
0.1361
0.1193
0.0970
0.1104
0.1331
0.1250
0.1168
0.0885
0.1163
0.0893
0.1681
0.0934
0.1244
0.1188
0.1640
0.1081
0.1310
0.1239
0.1269
0.0972
0.1211
0.1043
0.2294
0.1208
0.1018
0.1148
0.1472
0.0975
0.1309
0.1317
0.1914
0.1081
0.1491
0.1132
0.0993
0.1167
0.1473
0.1204
0.1012
0.1201
0.1062
0.1190
0.1336
0.1204
0.1411
0.1816
0.1134
0.1114
0.1735
0.1775
0.1215
0.1389
0.0983
0.1396
0.1208
0.1063
0.1136
0.1248
0.2639
0.1485
0.1077
0.1061
0.1551
0.1727
0.1251
0.1360
0.1216
0.1526
0.1103
0.1045
0.0857
0.1100
0.1190
0.1255
0.1585
0.1270
0.1328
0.1256
0.1079
0.1283
0.0715
0.1163
0.1375
0.0821
0.1461
0.1210
0.1227
0.1141
0.1072
0.1492
0.1203
0.2086
0.1131
0.0747
0.1385
0.1496
0.1287
0.0992
0.1335
0.1515
0.1861
0.1131
0.1649
0.0937
0.1362
0.0959
0.0870
0.1182
0.1449
0.1577
0.1250
0.1298
0.1252
0.1016
0.1787
0.1136
0.1204
0.1126
0.1295
0.1371
0.1136
0.1469
0.1608
0.0994
0.2343
0.1397
0.1145
0.1336
0.1430
0.1121
0.1389
0.1633
0.1050
0.1538
0.1095
0.1131
0.1005
0.1411
0.1132
0.1021
0.1399
0.1045
0.1220
0.1303
0.1541
0.1425
0.1217
0.0941
0.2309
0.0997
0.0928
0.1263
0.1255
0.0886
0.1247
0.1293
0.1076
0.1414
0.0935
0.1377
0.1662
0.1451
0.1051
0.1116
0.1321
0.1895
0.1370
0.1350
0.1285
0.0951
0.1843
0.2134
0.1534
0.1752
0.1721
0.0990
0.1510
0.1052
0.1361
0.1463
0.1610
0.1596
0.1157
0.0883
0.1122
0.1188
0.1181
0.1356
0.1305
0.1074
0.1093
0.1180
0.0961
0.2218
0.1376
0.1151
0.1370
0.1296
0.1005
0.0916
0.1126
0.0927
0.1432
0.0977
0.1235
0.1426
0.1382
0.1386
0.1177
0.1315
0.1288
0.1192
0.1377
0.1368
0.1468
0.1088
0.1311
0.1216
0.1078
0.1238
0.1370
0.1052
0.1886
0.1694
0.1326
0.1314
0.1080
0.1096
0.1218
0.1724
0.1309
0.0959
0.1403
0.1235
0.0953
0.1568
0.1517
0.1197
0.2459
0.1258
0.0967
0.0933
0.0624
0.1311
0.0904
0.1298
0.1141
0.1106
0.1079
0.1113
0.1231
0.1412
0.1160
0.1561
0.2678
0.1610
0.1171
0.1981
0.1232
0.1209
0.1024
0.1028
0.1370
0.0792
0.1250
0.1407
0.1429
0.1258
0.1135
0.1144
0.1183
0.1113
0.1903
0.1068
0.1368
0.1185
0.1132
0.1182
0.1056
0.0920
0.1041
0.1524
0.1525
0.1355
0.2332
0.1436
0.1497
0.1969
0.1629
0.1681
0.1129
0.1383
0.1483
0.1190
0.1353
0.1182
0.1201
0.1007
0.1146
0.1776
0.1119
0.1096
0.1113
0.1581
0.0983
0.1244
0.1458
0.1137
0.1371
0.1809
0.2130
0.1372
0.0939
0.1047
0.1311
0.1918
0.1234
0.0939
0.1210
0.1386
0.0903
0.1178
0.1601
0.1571
0.1462
0.1532
0.2961
0.0971
0.1383
0.1163
0.0939
0.1179
0.1066
0.1472
0.1305
0.1430
0.1198
0.1306
0.1825
0.1339
0.1046
0.0709
0.1584
0.1002
0.1495
0.1604
0.1422
0.1146
0.0903
0.0900
0.1229
0.1567
0.1262
0.1163
0.1507
0.1026
0.1223
0.1404
0.1365
0.1182
0.0923
0.1189
0.1092
0.1069
0.1234
0.2338
0.1229
0.1110
0.0989
0.1133
0.0932
0.1583
0.1236
0.1395
0.1109
0.1057
[torch.FloatTensor of size 512]
), ('layer4.0.conv2.weight',
( 0 , 0 ,.,.) =
1.6218e-04 -1.4720e-02 -1.7000e-02
-1.2850e-02 -3.3085e-02 -3.6656e-02
2.7812e-02 1.7691e-02 -1.8369e-02
( 0 , 1 ,.,.) =
1.0528e-02 3.1379e-02 2.4801e-02
-1.2698e-02 -2.9453e-02 -1.1834e-02
-9.4094e-03 -8.9462e-03 -3.1349e-02
( 0 , 2 ,.,.) =
-7.8447e-03 -2.9256e-02 5.3590e-03
-1.3791e-02 -1.1116e-02 5.0388e-03
-2.4919e-03 7.3514e-03 5.4013e-03
...
( 0 ,509,.,.) =
-1.0276e-03 -1.0275e-02 -2.9986e-02
-3.8465e-03 1.9549e-03 -1.6291e-02
-1.8100e-03 8.3778e-03 -8.5481e-03
( 0 ,510,.,.) =
-1.8196e-02 -1.3533e-02 -1.7457e-02
2.2457e-02 5.7402e-02 1.9325e-02
-2.4977e-02 -3.2113e-02 -8.1780e-03
( 0 ,511,.,.) =
3.6550e-03 4.9358e-03 -5.7597e-03
-1.6875e-02 1.3999e-04 3.7629e-04
-2.6272e-03 1.0947e-03 1.1145e-03
⋮
( 1 , 0 ,.,.) =
1.4018e-02 3.9198e-03 -1.7189e-03
-1.3175e-03 4.3503e-04 -1.1798e-02
-9.8003e-03 -1.7693e-02 -1.9910e-02
( 1 , 1 ,.,.) =
-1.4957e-02 -1.9796e-02 -2.8724e-02
5.8908e-03 -1.5228e-02 -5.6715e-03
2.9284e-03 -1.8028e-02 -7.1433e-03
( 1 , 2 ,.,.) =
-1.1625e-02 -3.3804e-02 -1.0025e-02
-1.6606e-02 -5.5716e-02 -2.3204e-02
-2.5758e-02 -4.3135e-02 -2.5901e-02
...
( 1 ,509,.,.) =
-1.5007e-02 -1.4333e-02 -2.5937e-03
-2.3078e-02 -1.5820e-02 -2.2818e-03
-4.1318e-03 -8.0353e-03 -2.3236e-03
( 1 ,510,.,.) =
-1.8531e-02 -1.8004e-02 -2.8084e-02
-3.6680e-02 -6.8641e-02 -5.2469e-02
-1.1712e-02 -2.4334e-02 -1.6733e-02
( 1 ,511,.,.) =
-2.2078e-02 -2.9163e-02 -3.8717e-03
-7.0301e-03 1.6718e-02 5.4339e-03
-1.3131e-02 1.1999e-02 -1.7480e-02
⋮
( 2 , 0 ,.,.) =
-5.2378e-03 -3.4890e-03 -2.0851e-03
1.5306e-02 -2.1752e-02 -8.7682e-03
2.2460e-02 9.9175e-03 -3.3635e-03
( 2 , 1 ,.,.) =
7.4677e-03 -9.1762e-03 -9.2569e-05
1.9441e-04 1.2344e-03 -8.9978e-03
-5.1243e-04 2.1850e-04 -4.8828e-03
( 2 , 2 ,.,.) =
1.7078e-02 3.3955e-03 9.3503e-03
2.0334e-02 -1.0621e-04 -8.2017e-05
1.0706e-02 -1.8414e-03 1.0828e-02
...
( 2 ,509,.,.) =
3.2008e-02 2.3494e-02 2.5386e-02
1.9307e-02 2.3924e-02 2.8972e-02
9.9003e-03 2.0158e-02 2.2655e-02
( 2 ,510,.,.) =
-9.8395e-03 -1.1114e-02 -3.7696e-03
-2.9508e-02 -3.6956e-02 -1.8228e-02
-1.3663e-03 -2.5845e-03 1.0352e-02
( 2 ,511,.,.) =
-7.3867e-03 -2.5413e-02 -2.1942e-02
-1.6699e-02 -1.5133e-02 -1.3030e-02
-2.0090e-02 3.7970e-03 -1.0341e-02
...
⋮
(509, 0 ,.,.) =
-1.6157e-02 -1.6883e-02 -2.8328e-04
-7.7759e-03 -2.4465e-03 -1.4641e-02
2.4639e-02 3.9862e-02 2.1048e-02
(509, 1 ,.,.) =
2.4491e-03 -9.3885e-03 -1.1786e-02
2.5301e-02 2.5625e-04 7.1335e-03
2.2342e-02 1.9042e-02 7.2526e-03
(509, 2 ,.,.) =
-1.4652e-02 -2.7802e-02 -4.3564e-03
-1.7961e-02 -4.3846e-02 2.7409e-03
-4.7968e-03 -8.4231e-03 1.2070e-02
...
(509,509,.,.) =
-2.0171e-02 -3.3546e-02 -1.6728e-02
-1.7847e-02 -5.1713e-02 -2.6780e-02
-1.3145e-03 -4.3181e-03 -9.6373e-03
(509,510,.,.) =
-5.3917e-03 -2.0410e-04 2.7798e-03
-9.6882e-04 -2.5141e-02 1.4804e-02
2.8748e-02 9.0832e-03 4.2548e-02
(509,511,.,.) =
-1.5698e-02 -1.9303e-02 -9.1469e-03
-2.0025e-02 -1.1131e-02 -3.3902e-02
-5.7436e-03 -7.3640e-03 -1.0044e-02
⋮
(510, 0 ,.,.) =
-8.8612e-03 -4.5370e-03 -1.2354e-02
-5.9245e-03 -1.7058e-02 -2.8041e-02
-1.0435e-02 7.6695e-04 -1.0578e-02
(510, 1 ,.,.) =
9.5200e-03 -5.1975e-03 1.2947e-02
4.4305e-03 -2.3992e-02 -8.4569e-04
4.6608e-03 9.6787e-03 8.2174e-03
(510, 2 ,.,.) =
5.1559e-03 4.4635e-04 -7.9934e-03
3.3069e-03 1.4450e-02 8.9234e-03
6.3402e-03 1.9043e-02 1.9021e-02
...
(510,509,.,.) =
7.6964e-03 -1.3777e-02 6.0539e-03
-1.5745e-03 -2.3391e-02 -1.0052e-02
9.5183e-03 -1.2251e-02 2.2436e-03
(510,510,.,.) =
1.0375e-02 3.5875e-03 -5.7940e-04
7.0412e-03 -1.0673e-02 -4.9120e-03
-2.6034e-03 1.1306e-02 7.0696e-03
(510,511,.,.) =
-1.7509e-02 -2.3182e-02 -1.7897e-02
-1.7769e-03 1.9672e-03 -7.3220e-03
-6.6833e-03 9.8286e-03 2.0653e-03
⋮
(511, 0 ,.,.) =
2.8375e-02 -8.1936e-03 1.8009e-02
1.5829e-02 -1.3571e-02 -1.9335e-02
4.0766e-03 -1.5722e-02 -5.0620e-02
(511, 1 ,.,.) =
-5.5310e-03 -1.8996e-02 -7.9436e-03
1.3825e-03 -4.9608e-02 1.7256e-03
7.6629e-03 -7.6101e-03 1.2541e-02
(511, 2 ,.,.) =
1.8052e-02 3.1718e-02 4.2556e-03
-3.6760e-03 3.0490e-03 -1.2264e-02
-8.9404e-03 -1.6604e-02 1.6348e-03
...
(511,509,.,.) =
5.3192e-03 1.8204e-02 1.8114e-02
-6.1202e-03 1.5905e-03 2.0264e-02
-1.1471e-02 -1.5697e-02 9.0871e-03
(511,510,.,.) =
3.7707e-03 8.0599e-03 1.8290e-02
1.7257e-02 6.9638e-03 1.8746e-02
1.0751e-02 1.3663e-02 -1.0081e-03
(511,511,.,.) =
1.9711e-02 -1.4569e-02 -2.4663e-02
2.5966e-03 -2.4807e-02 9.3861e-03
-1.2876e-03 1.3974e-03 1.3434e-02
[torch.FloatTensor of size 512x512x3x3]
), ('layer4.0.bn2.weight',
0.4474
0.5138
0.4335
0.3421
0.3855
0.3495
0.3741
0.5836
0.4327
0.5043
0.4618
0.3866
0.3498
0.4798
0.3310
0.3913
0.3880
0.5225
0.3975
0.3292
0.4151
0.4458
0.3970
0.3614
0.3914
0.4633
0.3463
0.3644
0.3272
0.4584
0.4280
0.4538
0.4030
0.4673
0.4209
0.3987
0.4233
0.3876
0.4212
0.3460
0.3522
0.3744
0.4550
0.2888
0.4590
0.4817
0.4450
0.5110
0.4052
0.4247
0.3558
0.3075
0.4462
0.4724
0.4253
0.3884
0.4492
0.3727
0.4630
0.3985
0.3512
0.3665
0.3860
0.5082
0.4022
0.3458
0.4805
0.5390
0.4223
0.4275
0.4590
0.4736
0.3673
0.5405
0.3243
0.5178
0.4743
0.3506
0.3759
0.4328
0.3867
0.4591
0.3843
0.4982
0.5288
0.3946
0.4589
0.3197
0.4676
0.4806
0.4308
0.4235
0.3284
0.3877
0.4140
0.4469
0.4041
0.4407
0.4356
0.5120
0.5059
0.4628
0.4585
0.3311
0.3424
0.4150
0.5170
0.4593
0.5228
0.4252
0.4214
0.4995
0.4098
0.5380
0.4874
0.3719
0.4649
0.4320
0.3277
0.3743
0.4360
0.4838
0.4399
0.3763
0.4150
0.5147
0.5012
0.4382
0.3655
0.4037
0.4498
0.4720
0.3914
0.3237
0.3208
0.3224
0.4291
0.4009
0.3947
0.3779
0.4349
0.4120
0.3274
0.4334
0.3740
0.4189
0.4288
0.3071
0.4260
0.3410
0.4375
0.4407
0.3750
0.5853
0.4518
0.5045
0.3005
0.4968
0.4155
0.3755
0.5514
0.4146
0.4677
0.1404
0.5001
0.4193
0.4246
0.4452
0.5109
0.4488
0.4574
0.3896
0.4145
0.4497
0.4245
0.3971
0.3957
0.4072
0.5305
0.4986
0.3733
0.4280
0.3469
0.4178
0.3766
0.4029
0.3814
0.4493
0.5132
0.4080
0.4155
0.3635
0.4391
0.3489
0.4228
0.4833
0.3494
0.4406
0.3795
0.4298
0.4910
0.3878
0.6299
0.4322
0.5436
0.4140
0.4312
0.3161
0.3612
0.3597
0.4281
0.4506
0.4294
0.3646
0.4110
0.4038
0.4098
0.3901
0.3928
0.5421
0.3629
0.4078
0.4586
0.4217
0.3953
0.3997
0.3838
0.4374
0.3576
0.4217
0.4128
0.3904
0.4137
0.5145
0.4039
0.3577
0.4429
0.5639
0.3848
0.6104
0.4482
0.6203
0.5336
0.3480
0.5401
0.6044
0.4077
0.3469
0.4281
0.4631
0.5948
0.3479
0.3689
0.3658
0.3191
0.5492
0.3410
0.5386
0.4041
0.3373
0.4186
0.5187
0.3933
0.3188
0.3502
0.3736
0.4238
0.4752
0.3322
0.5078
0.4317
0.5318
0.4413
0.5510
0.5648
0.4130
0.4017
0.4304
0.4077
0.4285
0.4360
0.3749
0.4261
0.3905
0.3030
0.3412
0.3768
0.4507
0.3127
0.4592
0.4298
0.3936
0.3106
0.3869
0.3594
0.4046
0.4722
0.4373
0.3902
0.3515
0.4448
0.4299
0.4347
0.4693
0.4807
0.2549
0.4171
0.4387
0.4156
0.3976
0.4092
0.4953
0.4824
0.3468
0.4382
0.4179
0.4668
0.3299
0.5986
0.4949
0.4167
0.4996
0.4528
0.4550
0.4945
0.3415
0.4658
0.4356
0.3976
0.5439
0.4643
0.5122
0.4669
0.4463
0.4810
0.3492
0.3961
0.3593
0.4053
0.3878
0.3959
0.5001
0.2808
0.5470
0.4448
0.4894
0.4621
0.3417
0.3485
0.5060
0.3637
0.3774
0.3248
0.4520
0.3936
0.3403
0.4660
0.4114
0.3643
0.4196
0.3903
0.5128
0.4221
0.4115
0.4240
0.3610
0.4999
0.3672
0.4721
0.4252
0.5590
0.4694
0.7322
0.5849
0.4749
0.4426
0.3934
0.3909
0.4576
0.3636
0.4146
0.4129
0.5081
0.3681
0.3652
0.4254
0.2945
0.4142
0.3145
0.4304
0.4252
0.3493
0.4257
0.5133
0.3261
0.4367
0.3637
0.3712
0.4183
0.3772
0.4418
0.4231
0.4133
0.4731
0.4955
0.4046
0.4079
0.4719
0.3875
0.4673
0.4129
0.4569
0.3530
0.4793
0.3844
0.3785
0.3343
0.4351
0.6512
0.4295
0.4122
0.3788
0.3692
0.4343
0.4214
0.3873
0.4566
0.4456
0.4107
0.4596
0.7082
0.4452
0.3515
0.4785
0.4217
0.5756
0.4312
0.4047
0.4043
0.4764
0.5489
0.4430
0.5559
0.3744
0.3951
0.4376
0.4752
0.4340
0.4399
0.3586
0.4161
0.3930
0.4599
0.4354
0.3448
0.4649
0.4442
0.4275
0.3881
0.3247
0.4909
0.3426
0.3989
0.4320
0.3363
0.3991
0.4732
0.3514
0.4736
0.4244
0.4603
0.3298
0.4357
0.4353
0.3742
0.4191
0.3880
0.4212
0.4527
0.7213
0.3969
0.5217
0.3786
0.3512
0.5318
0.4138
0.3243
0.3244
0.3652
0.4774
0.3997
0.2800
0.4562
0.4463
0.4816
0.4290
0.4399
0.4633
0.3575
0.4774
0.3105
0.4356
0.3797
0.4304
0.4261
0.3740
0.3370
0.3917
0.3637
0.4347
0.5235
0.3845
[torch.FloatTensor of size 512]
), ('layer4.0.bn2.bias',
-0.1759
-0.2156
-0.2047
-0.1695
-0.1628
-0.1473
-0.2158
-0.2905
-0.1112
-0.2196
-0.1020
-0.1549
-0.1989
-0.0445
-0.1508
-0.1920
-0.2114
-0.1655
-0.1854
-0.1733
-0.1289
-0.2376
-0.1965
-0.1965
-0.1776
-0.1774
-0.1760
-0.1546
-0.1648
-0.2599
-0.1752
-0.2498
-0.1741
-0.2410
-0.2498
-0.2938
-0.1496
-0.1578
-0.1800
-0.1851
-0.1516
-0.1345
-0.2746
-0.1248
-0.2246
-0.2531
-0.2398
-0.1859
-0.1739
-0.2393
-0.1214
-0.1803
-0.2729
-0.2617
-0.1855
-0.2316
-0.2333
-0.1860
-0.2097
-0.0692
-0.1912
-0.2078
-0.1084
-0.2810
-0.1303
-0.1654
-0.2119
-0.3641
-0.2951
-0.2384
-0.1632
-0.1892
-0.1792
-0.2031
-0.1770
-0.2738
-0.3324
-0.1725
-0.1793
-0.2638
-0.2207
-0.1609
-0.1534
-0.1414
-0.2992
-0.1450
-0.1838
-0.1779
-0.1422
-0.2198
-0.1900
-0.1580
-0.1666
-0.2490
-0.1569
-0.1718
-0.1660
-0.1972
-0.2287
-0.2366
-0.2230
-0.1543
-0.2030
-0.1431
-0.1363
-0.2015
-0.1804
-0.2093
-0.2964
-0.1984
-0.2683
-0.2216
-0.2147
-0.3404
-0.2668
-0.1890
-0.1733
-0.2226
-0.1772
-0.1698
-0.1095
-0.2180
-0.1154
-0.1654
-0.1910
-0.3535
-0.3112
-0.2161
-0.1496
-0.1667
-0.2849
-0.2207
-0.1529
-0.1807
-0.2118
-0.1869
-0.1376
-0.1770
-0.1861
-0.1969
-0.1741
-0.3011
-0.0787
-0.2017
-0.1947
-0.2247
-0.2459
-0.1058
-0.1401
-0.1213
-0.1199
-0.1760
-0.2156
-0.3307
-0.3515
-0.2366
-0.1185
-0.2155
-0.1751
-0.1892
-0.3365
-0.1598
-0.2554
0.0644
-0.2856
-0.1198
-0.1583
-0.2297
-0.3352
-0.1987
-0.2686
-0.1632
-0.2461
-0.2900
-0.2428
-0.1449
-0.1900
-0.2149
-0.1541
-0.2917
-0.2504
-0.2213
-0.0463
-0.1547
-0.1511
-0.1527
-0.1735
-0.1931
-0.1987
-0.2239
-0.2086
-0.2688
-0.1845
-0.1797
-0.1833
-0.3880
-0.1539
-0.1553
-0.1567
-0.2238
-0.1511
-0.2540
-0.2849
-0.1826
-0.2687
-0.2328
-0.2108
-0.2410
-0.1022
-0.1507
-0.1978
-0.1734
-0.2282
-0.0985
-0.1847
-0.1770
-0.1576
-0.1937
-0.1643
-0.2822
-0.1866
-0.2754
-0.2266
-0.2169
-0.1352
-0.2194
-0.1060
-0.2139
-0.1322
-0.1889
-0.2130
-0.1913
-0.2364
-0.1402
-0.2228
-0.2354
-0.1632
-0.1905
-0.1428
-0.1177
-0.2419
-0.2733
-0.2963
-0.1600
-0.3558
-0.3673
-0.2201
-0.1505
-0.2084
-0.0870
-0.2052
-0.2070
-0.1986
-0.2299
-0.0745
-0.1765
-0.1412
-0.2180
-0.1450
-0.1426
-0.1452
-0.2916
-0.0871
-0.1359
-0.2003
-0.1125
-0.2588
-0.1988
-0.2028
-0.2443
-0.0864
-0.3415
-0.2579
-0.2343
-0.3552
-0.1859
-0.1153
-0.1732
-0.1780
-0.1909
-0.2018
-0.1886
-0.2751
-0.1501
0.1165
-0.1891
-0.1845
-0.2037
-0.0339
-0.3464
-0.1956
-0.1962
-0.1537
-0.1902
-0.1431
-0.3022
-0.1780
-0.1971
-0.2118
-0.0952
-0.1711
-0.2409
-0.2184
-0.2114
-0.2042
-0.0566
-0.0700
-0.2081
-0.1872
-0.2079
-0.1540
-0.2266
-0.1981
-0.1679
-0.2022
-0.2010
-0.1051
-0.1705
-0.2139
0.0396
-0.1077
-0.2745
-0.2690
-0.2603
-0.2819
-0.1917
-0.1940
-0.2944
-0.1822
-0.2903
-0.1064
-0.2076
-0.2648
-0.3032
-0.2878
-0.1579
-0.0071
-0.2142
-0.2022
-0.1516
-0.1123
0.0246
-0.0978
-0.1382
-0.1800
-0.3214
-0.2179
-0.1369
-0.0800
0.0117
-0.1839
-0.1926
-0.1614
-0.2769
-0.1909
-0.2101
-0.2305
-0.2055
-0.2017
-0.2741
-0.1005
-0.3152
-0.1121
-0.1700
-0.1364
-0.2157
-0.2673
-0.1584
-0.1997
-0.1745
-0.1886
-0.2307
-0.2024
-0.3376
-0.2266
-0.2355
-0.2133
-0.2346
-0.2412
-0.2358
-0.1265
-0.2341
-0.1887
-0.1646
-0.1417
-0.1882
-0.1076
-0.3048
-0.1162
-0.1651
-0.2046
-0.1833
-0.3102
-0.1778
-0.1575
-0.2676
-0.1777
-0.1569
-0.1741
-0.1892
-0.3028
-0.1457
-0.2179
-0.2226
-0.1609
-0.1423
-0.2683
-0.2920
-0.1740
-0.2079
-0.1940
-0.2679
-0.1973
-0.1951
-0.1665
-0.2286
-0.1903
-0.2667
-0.4010
-0.2550
-0.1817
-0.2025
-0.1589
-0.2476
-0.0573
-0.2203
-0.2084
-0.1587
-0.1212
-0.1795
-0.3449
-0.1662
-0.2523
-0.2435
-0.2878
-0.2797
-0.1897
-0.2113
-0.1943
-0.2050
-0.1694
-0.2243
-0.2987
-0.1328
-0.1428
-0.2399
-0.1593
-0.1999
-0.3225
-0.1860
-0.1763
-0.2691
-0.2097
-0.2396
-0.1140
-0.1897
-0.1870
-0.1829
-0.2615
-0.2073
-0.1858
-0.0598
-0.1915
-0.2183
-0.2088
-0.1742
-0.2715
-0.1999
-0.2117
-0.2492
-0.1717
-0.1566
-0.1669
-0.3015
-0.1685
-0.2434
-0.2297
-0.1947
-0.2860
-0.3288
-0.2197
-0.1862
-0.1755
-0.0987
-0.1756
-0.1304
-0.1555
-0.1679
-0.2222
-0.2819
-0.2652
-0.0947
-0.2412
-0.2731
-0.2572
-0.2604
-0.2934
-0.2470
-0.1820
-0.2740
-0.1336
-0.1698
-0.1919
-0.1796
-0.2325
-0.1352
-0.1077
-0.2184
-0.1539
-0.2015
-0.3243
-0.1713
[torch.FloatTensor of size 512]
), ('layer4.0.bn2.running_mean',
-0.2323
-0.2009
-0.1230
-0.1102
-0.0945
-0.1073
-0.1357
-0.1954
-0.1826
-0.1890
-0.1432
-0.1667
-0.1086
-0.1213
-0.1614
-0.1109
-0.1794
-0.1853
-0.1421
-0.1549
-0.1322
-0.1870
-0.1730
-0.1042
-0.1547
-0.1679
-0.1846
-0.1568
-0.1340
-0.0786
-0.1664
-0.1481
-0.1538
-0.1475
-0.1476
-0.1409
-0.2402
-0.0707
-0.0515
-0.1052
-0.1535
-0.2514
-0.1963
-0.1318
-0.1389
-0.1726
-0.2069
-0.1794
-0.0709
-0.1851
-0.1337
-0.0983
-0.1463
-0.1685
-0.1355
-0.1603
-0.1008
-0.1787
-0.2180
-0.1460
-0.1948
-0.1348
-0.2020
-0.1971
-0.1880
-0.0911
-0.1778
-0.1945
-0.0790
-0.2138
-0.1080
-0.1863
-0.1487
-0.1820
-0.1090
-0.1556
-0.1834
-0.1325
-0.1903
-0.1287
-0.1414
-0.0978
-0.0961
-0.1062
-0.1628
-0.1549
-0.1931
-0.1080
-0.1673
0.0160
-0.1061
-0.0820
-0.1730
-0.1498
-0.1451
-0.1533
-0.0621
-0.1445
-0.0918
-0.1844
-0.2001
-0.1960
-0.2816
-0.1050
-0.0827
-0.0967
-0.1907
-0.2045
-0.1425
-0.1722
-0.1856
-0.1481
-0.0500
-0.1964
-0.2529
-0.1305
-0.1833
-0.1010
-0.1678
-0.0904
-0.1421
-0.1829
-0.1483
-0.1604
-0.2044
-0.2470
-0.2574
0.0411
-0.1046
-0.0987
-0.1557
-0.1563
-0.1190
-0.0537
-0.1004
-0.1289
-0.1472
-0.1177
-0.1001
-0.1697
-0.1182
-0.1078
-0.0982
-0.0848
-0.1159
-0.2130
-0.1836
-0.1310
-0.1298
-0.1068
-0.1665
-0.1800
-0.1908
-0.1894
-0.1793
-0.1248
-0.1584
-0.0122
-0.1105
-0.0558
-0.1281
-0.0900
-0.1077
-0.0362
-0.1808
-0.1684
-0.1897
-0.1344
-0.1439
-0.1051
-0.1875
-0.1760
-0.0602
-0.1801
-0.1497
-0.0929
-0.1682
-0.1124
-0.2335
-0.1392
-0.1584
-0.1489
-0.0831
-0.1165
-0.1117
-0.1485
-0.2255
-0.0950
-0.3208
-0.1579
-0.2568
-0.0963
-0.1540
-0.0998
-0.1362
-0.2135
-0.2037
-0.1357
-0.1213
-0.1102
-0.1944
-0.1846
-0.1457
-0.1200
-0.1515
-0.1240
-0.1507
-0.1459
-0.1838
-0.0138
-0.1594
-0.1894
-0.1916
-0.0972
-0.1900
-0.0623
-0.1021
-0.0999
-0.2354
-0.2060
-0.1587
-0.0965
-0.0440
-0.1450
-0.2433
-0.1366
-0.1435
-0.1234
-0.0996
-0.1855
-0.1259
-0.1713
-0.2071
0.3675
-0.1830
-0.0825
-0.1592
0.4767
-0.1776
-0.2842
-0.1735
-0.2587
-0.0918
-0.0702
-0.1917
-0.2316
-0.1425
-0.2009
-0.1625
-0.0506
-0.1747
-0.1638
-0.0841
-0.0549
-0.1516
-0.2360
-0.1172
-0.1092
-0.0038
-0.1679
-0.1220
-0.0646
-0.1783
-0.1515
-0.0512
-0.0918
-0.0816
-0.1421
-0.1359
0.0083
-0.2484
-0.1884
-0.0736
-0.1139
-0.2213
-0.1780
-0.1929
-0.1703
-0.1334
-0.2096
-0.1853
-0.1166
-0.1438
-0.1881
0.1436
-0.0686
-0.1421
-0.1335
-0.1524
-0.2322
-0.2406
-0.0871
-0.1397
-0.1480
-0.1512
-0.1262
-0.1244
-0.1173
-0.1291
-0.1326
-0.1113
-0.2080
-0.1329
-0.1498
-0.1469
-0.1715
-0.1090
-0.1383
-0.0470
-0.1454
-0.1737
-0.2443
-0.1302
-0.0830
-0.1078
-0.1338
-0.1451
-0.1278
-0.1947
-0.0877
-0.1288
-0.1151
-0.1809
-0.1068
-0.1797
-0.1599
-0.1411
-0.2104
-0.1189
-0.1548
-0.1769
-0.2152
-0.2549
-0.1977
-0.1385
-0.2339
-0.2481
-0.0978
-0.0979
-0.0697
-0.1257
-0.0927
-0.1224
-0.1986
-0.1570
-0.1845
-0.1944
-0.1407
-0.1315
-0.1657
-0.1823
-0.1973
-0.1310
-0.1542
-0.1280
-0.0570
-0.1538
-0.1152
-0.1496
-0.0477
-0.1515
-0.1121
-0.1667
-0.1651
-0.0415
-0.1299
-0.1688
-0.1143
-0.0564
-0.0792
-0.2140
-0.1535
-0.3087
-0.1927
-0.1168
-0.1704
-0.1362
-0.1538
-0.0804
-0.1526
-0.1125
-0.1438
-0.0565
-0.0919
-0.1430
-0.1173
-0.2215
-0.1624
-0.2277
-0.1165
-0.1816
-0.0899
-0.0536
-0.1443
-0.1539
-0.1223
-0.1396
-0.1891
-0.1502
-0.1522
-0.1075
-0.0810
-0.1276
-0.2212
-0.0747
-0.1313
-0.1488
-0.1832
-0.1005
-0.1688
-0.2087
-0.2055
-0.0563
-0.1815
-0.0792
-0.1381
-0.0926
-0.1136
-0.2200
-0.1026
-0.1947
-0.0892
-0.1711
-0.0661
-0.1989
-0.1249
-0.1797
-0.2079
-0.1755
-0.0912
-0.3122
-0.1278
-0.1374
-0.2122
-0.1040
-0.1104
-0.2023
-0.1358
-0.1437
-0.1378
-0.1535
-0.1709
-0.2460
-0.1195
-0.1224
-0.1485
-0.2050
-0.1663
-0.1218
-0.1084
-0.0843
-0.1545
-0.1624
-0.0402
-0.0948
-0.1233
-0.1247
-0.0949
-0.1890
-0.1409
-0.1957
-0.1521
-0.1791
-0.1771
-0.0799
-0.1515
-0.2390
-0.0979
-0.1374
-0.1728
-0.0893
-0.2172
-0.1527
-0.1728
-0.1136
-0.1325
-0.2203
-0.1285
-0.1800
-0.2360
-0.1478
-0.0860
-0.1360
-0.1366
-0.1958
-0.1150
-0.1826
-0.2315
-0.1288
-0.2192
-0.0944
-0.1645
-0.1989
-0.1677
-0.1951
-0.1835
-0.0518
-0.0916
-0.1564
-0.1527
-0.1834
-0.1424
-0.0484
-0.0799
-0.0930
-0.0937
-0.1105
-0.1594
-0.1454
-0.2459
-0.0755
-0.2106
[torch.FloatTensor of size 512]
), ('layer4.0.bn2.running_var',
1.00000e-02 *
2.4833
3.3080
2.0296
1.5249
2.0692
1.8305
2.0471
2.8226
3.0079
2.4272
2.7239
2.1119
1.6120
4.8926
1.5775
1.9033
2.0561
3.5445
2.2633
1.4559
2.1948
2.6594
2.5071
1.5912
1.9282
2.1082
1.9509
1.9900
1.6651
1.8938
2.1580
2.5958
2.0472
2.1154
1.6775
1.4313
2.8038
2.0398
2.3671
1.3760
1.9427
2.1632
2.3166
1.3648
2.5834
2.0891
1.9066
3.4593
2.1960
2.2518
2.0690
1.3641
1.6057
1.9966
2.0539
1.7946
1.7566
1.9128
2.2047
2.9701
1.7670
1.9960
2.9041
2.3745
2.3840
2.0386
2.5736
1.7321
1.6626
1.8936
3.6740
2.3555
1.7346
2.9061
1.7480
2.0982
1.6436
1.7391
2.2283
1.9045
1.5922
2.6576
1.8965
2.4633
2.2448
2.3271
2.6828
1.5013
3.4970
2.7197
2.4104
2.4977
1.8593
1.8319
2.3605
2.9364
2.0061
2.1858
2.2766
2.0778
3.7099
2.7477
2.4862
1.7150
1.6191
1.5232
3.0046
2.6621
1.8450
2.9335
1.7999
2.5333
1.8225
2.6072
2.3344
1.9952
2.7224
3.9102
1.7148
1.8970
2.6572
2.3887
2.5440
1.9029
1.8488
1.9150
2.2768
2.1362
1.5905
2.0834
2.0401
2.5575
2.2002
1.2720
1.5156
1.7273
2.4564
2.0573
1.9230
1.7903
2.1950
1.9275
1.9678
2.0337
2.0774
2.3042
2.2799
1.7380
2.5705
2.0541
3.0618
2.0408
1.8540
2.5696
1.4412
2.2202
1.8074
3.2491
2.3889
1.7946
1.9074
2.2918
1.8890
2.9527
2.1006
2.5455
2.3745
2.0723
2.0327
2.0734
2.0228
1.9176
1.7930
2.2085
1.7270
2.3272
2.2734
1.8007
3.7277
2.0109
2.5690
2.3128
2.7003
2.4481
2.0348
2.9298
1.9656
2.0298
3.2104
1.7097
2.0729
1.6681
2.9341
1.3314
2.2363
1.5633
1.8116
3.0468
2.0086
2.1300
2.8081
1.7087
3.4536
2.1716
2.4298
1.6968
2.1991
1.4881
1.9965
1.6619
2.4966
2.3971
1.9127
2.3055
2.0037
2.4586
2.4219
1.9185
2.3733
2.4952
2.1067
1.6952
2.2617
1.6901
2.5003
1.8883
1.9898
2.0216
2.0317
2.4188
1.9648
1.8298
1.8622
6.7734
1.8365
1.4915
2.3664
8.2619
2.5052
4.0331
1.8407
3.2252
3.3313
1.6555
2.0685
2.6944
1.5494
1.8364
1.8372
3.4329
3.8219
1.6332
1.5061
1.5214
2.0056
3.3673
2.1137
2.7841
1.9850
2.1473
2.3712
2.5655
2.2647
1.6206
1.6700
2.4116
1.6932
2.5522
1.6277
2.9663
3.7830
1.8506
1.7631
2.8417
2.9806
2.1214
2.1561
2.1888
2.1089
2.4743
2.3409
1.9061
1.7674
2.7206
3.4606
1.6863
1.8932
2.4011
2.2686
1.7131
2.2803
1.7171
1.9732
1.7178
1.9143
1.4510
3.2377
2.2500
1.8290
2.1394
3.0828
2.1373
2.0031
2.3608
2.8301
1.8092
2.3573
1.9300
1.8900
1.9180
2.2582
3.1516
2.6300
1.6959
2.0105
1.9393
4.1140
1.5049
3.3769
5.2802
2.8814
1.9997
2.0849
2.0606
2.1785
1.3761
2.1078
1.5782
1.8571
2.5762
3.7403
3.2722
2.1694
1.7374
1.8202
1.9531
5.2114
1.5209
1.8567
2.2269
2.0769
6.5523
1.6649
3.6942
2.0398
1.6697
2.2643
2.3169
2.5668
4.6674
1.8211
2.1373
2.0317
1.8884
1.8498
1.6197
2.4375
1.6976
1.8281
1.4417
2.8025
2.0342
2.6802
1.8525
2.3066
1.5621
2.3369
2.0752
2.5609
1.8787
3.0633
2.4343
9.0075
2.2312
2.1592
1.6924
2.0200
1.7122
2.2771
1.5618
2.9398
1.9049
2.7112
1.7003
1.6870
2.1307
1.6659
1.5115
2.2211
2.0252
1.8544
1.4517
1.3800
2.9232
1.6665
1.9171
1.6493
1.9881
2.0807
2.0759
1.2931
2.0713
1.7423
3.0200
2.7102
2.5999
1.5614
1.8196
2.0943
2.1923
2.4057
1.8049
1.5076
2.5803
1.8316
1.8238
1.6072
1.8363
2.8800
1.6225
2.2379
1.9086
2.0058
1.5964
3.0622
1.8056
2.0481
2.6230
2.5718
2.4484
4.8848
2.0584
1.7286
2.3303
2.0452
2.5861
2.1619
1.7750
1.7517
2.2799
3.7831
1.9328
3.0274
1.8237
1.9539
1.9688
2.8542
2.1648
1.7796
1.4165
2.0635
1.5512
2.4537
1.8025
1.7956
2.1426
2.3666
2.5232
1.7208
1.4933
2.9103
2.3218
1.7705
2.0426
1.5930
2.3843
2.4137
1.5038
2.4345
1.9328
2.5741
1.9144
2.4423
1.5700
2.3361
1.8594
1.7644
2.2995
1.8335
3.9936
1.6851
3.1330
1.8009
2.0876
2.8069
2.4640
1.9396
1.5216
1.3678
2.1538
1.5096
1.6284
1.9524
1.8641
2.0955
2.0575
1.4833
1.9324
1.9538
1.8318
1.9908
2.0339
2.1765
2.2689
2.0712
2.3893
1.8392
1.7216
1.7257
2.6570
1.5864
1.7469
[torch.FloatTensor of size 512]
), ('layer4.0.downsample.0.weight',
( 0 , 0 ,.,.) =
5.6973e-03
( 0 , 1 ,.,.) =
2.0359e-03
( 0 , 2 ,.,.) =
1.6696e-02
...
( 0 ,253,.,.) =
8.4662e-03
( 0 ,254,.,.) =
-2.7450e-02
( 0 ,255,.,.) =
9.6710e-03
⋮
( 1 , 0 ,.,.) =
-2.7123e-02
( 1 , 1 ,.,.) =
-1.5713e-02
( 1 , 2 ,.,.) =
5.4291e-02
...
( 1 ,253,.,.) =
-2.0631e-02
( 1 ,254,.,.) =
-3.0793e-02
( 1 ,255,.,.) =
1.3228e-03
⋮
( 2 , 0 ,.,.) =
-5.2315e-02
( 2 , 1 ,.,.) =
-3.5294e-02
( 2 , 2 ,.,.) =
3.9423e-02
...
( 2 ,253,.,.) =
-3.8161e-02
( 2 ,254,.,.) =
-2.6385e-02
( 2 ,255,.,.) =
-4.4272e-02
...
⋮
(509, 0 ,.,.) =
4.9361e-02
(509, 1 ,.,.) =
4.3553e-02
(509, 2 ,.,.) =
1.0309e-02
...
(509,253,.,.) =
7.1570e-03
(509,254,.,.) =
1.4031e-03
(509,255,.,.) =
-6.6892e-02
⋮
(510, 0 ,.,.) =
5.3341e-02
(510, 1 ,.,.) =
-1.4842e-02
(510, 2 ,.,.) =
-4.8024e-02
...
(510,253,.,.) =
5.4730e-03
(510,254,.,.) =
4.2852e-02
(510,255,.,.) =
1.2923e-02
⋮
(511, 0 ,.,.) =
3.0030e-02
(511, 1 ,.,.) =
-9.1642e-03
(511, 2 ,.,.) =
9.0266e-03
...
(511,253,.,.) =
1.0095e-02
(511,254,.,.) =
-1.1120e-02
(511,255,.,.) =
-7.9560e-03
[torch.FloatTensor of size 512x256x1x1]
), ('layer4.0.downsample.1.weight',
0.1694
0.3368
0.2993
0.3745
0.1513
0.1781
0.3167
0.3947
0.1858
0.2068
0.1090
0.2042
0.2955
0.0765
0.2023
0.2487
0.3295
0.3349
0.2532
0.2739
0.1661
0.3432
0.3424
0.2969
0.2226
0.0993
0.3328
0.2349
0.2894
0.2296
0.2719
0.3945
0.1990
0.2564
0.2557
0.3541
0.1848
0.2513
0.3101
0.2782
0.2109
0.2441
0.3282
0.3248
0.2499
0.1873
0.2643
0.3949
0.1962
0.2587
0.1708
0.3381
0.2238
0.2498
0.2787
0.3783
0.3445
0.2681
0.2956
0.1146
0.2688
0.3479
0.1295
0.2843
0.1552
0.3026
0.2738
0.1891
0.3568
0.2302
0.2199
0.2070
0.2119
0.0971
0.2482
0.2264
0.3555
0.3113
0.2386
0.2654
0.2975
0.2666
0.2180
0.1451
0.2460
0.1734
0.2358
0.2891
0.2091
0.1971
0.2185
0.2008
0.2461
0.3726
0.2028
0.1993
0.3652
0.2258
0.2606
0.1900
0.2764
0.2011
0.1973
0.2958
0.3222
0.4117
0.1475
0.2674
0.1928
0.3615
0.2774
0.2143
0.2688
0.4286
0.2560
0.2777
0.1339
0.5103
0.3238
0.2417
0.1529
0.1843
0.0579
0.2288
0.1797
0.2803
0.2279
0.1579
0.3196
0.1842
0.3378
0.1688
0.1654
0.3049
0.3533
0.2948
0.1140
0.2503
0.1892
0.2647
0.2405
0.3880
0.1933
0.1918
0.2511
0.2901
0.3151
0.3252
0.1296
0.2491
0.1417
0.1295
0.3062
0.2836
0.3483
0.2306
0.2741
0.2700
0.1873
0.2431
0.3526
0.3546
0.2721
0.2708
0.3065
0.0832
0.2968
0.2286
0.3276
0.2695
0.2452
0.2444
0.2857
0.3365
0.2784
0.2933
0.3397
0.2231
0.2330
0.1486
0.3846
0.3104
0.1724
0.1724
0.3466
0.2978
0.2582
0.1879
0.2419
0.2249
0.2720
0.3735
0.4259
0.3754
0.1731
0.3698
0.2349
0.2694
0.3148
0.1658
0.1181
0.2994
0.4018
0.2126
0.3864
0.2955
0.1848
0.3686
0.1972
0.3265
0.2319
0.1676
0.1756
0.2367
0.2139
0.1974
0.2561
0.2619
0.2170
0.2284
0.3486
0.4500
0.2563
0.2559
0.2814
0.1797
0.1736
0.2013
0.3411
0.2245
0.1385
0.2284
0.2230
0.2566
0.2301
0.3639
0.1380
0.2381
0.2590
0.0830
0.1863
0.1267
0.4501
0.2741
0.2590
0.2782
0.2248
0.2718
0.1949
0.1815
0.2969
0.3168
0.3389
0.2790
0.1594
0.2752
0.2947
0.2909
0.1418
0.3336
0.1953
0.2646
0.0879
0.2553
0.3335
0.1943
0.2777
0.2386
0.3676
0.3042
0.1234
0.2615
0.2548
0.3224
0.3462
0.2090
0.2142
0.2054
0.2115
0.2153
0.2163
0.2509
0.2429
0.3326
-0.0527
0.2244
0.2319
0.2674
0.1103
0.2320
0.2822
0.3234
0.2818
0.2093
0.2261
0.2900
0.3127
0.3456
0.2592
0.1677
0.3924
0.2694
0.1997
0.2973
0.3324
0.2270
0.0656
0.2964
0.1948
0.2383
0.3021
0.2510
0.3117
0.3185
0.1721
0.1867
0.1665
0.2851
0.3512
-0.0486
0.1558
0.2213
0.3281
0.3861
0.2375
0.3057
0.1178
0.2681
0.1921
0.2211
0.1679
0.2877
0.2495
0.2451
0.2678
0.2393
0.0988
0.2778
0.2465
0.1747
0.1005
0.0502
0.2809
0.2810
0.1716
0.2114
0.2213
0.2817
0.1506
0.0769
0.2381
0.2411
0.2942
0.2543
0.2556
0.3451
0.2948
0.3040
0.3204
0.2757
0.1657
0.2941
0.1301
0.1854
0.2866
0.3198
0.2127
0.3608
0.3440
0.0954
0.2586
0.1709
0.2007
0.1967
0.1972
0.1942
0.3201
0.3484
0.3437
0.3153
0.2020
0.3251
0.3227
0.3038
0.2634
0.2364
0.2492
0.3080
0.2591
0.2391
0.2720
0.2601
0.3210
0.1818
0.3526
0.3579
0.2861
0.2526
0.1642
0.2897
0.3996
0.2651
0.2031
0.2502
0.3694
0.2085
0.2804
0.2233
0.2309
0.1609
0.2369
0.2116
0.3549
0.1635
0.1642
0.3072
0.3077
0.2152
0.2821
0.2857
0.1701
0.2305
0.2134
0.3189
0.1061
0.2628
0.2608
0.1749
0.0820
0.1815
0.3566
0.1204
0.3159
0.1595
0.3790
0.3272
0.2086
0.3096
0.2253
0.1456
0.1346
0.2304
0.2913
0.2727
0.2027
0.2688
0.1958
0.2277
0.3036
0.3250
0.3000
0.3328
0.2417
0.2665
0.2473
0.0913
0.2503
0.2543
0.3710
0.3321
0.3693
0.1099
0.1701
0.1758
0.3888
0.2206
0.2766
0.2813
0.1755
0.2616
0.1544
0.2519
0.1945
0.2452
0.3405
0.2446
0.2426
0.1822
0.3002
0.3037
0.3118
0.2414
0.2326
0.1303
0.3081
0.0979
0.2776
0.2918
0.3848
0.1789
0.3622
0.3005
0.1923
0.2672
0.1663
0.2998
0.2710
0.2040
0.2565
0.2289
0.2552
0.2121
0.3532
0.2293
0.2510
0.3085
0.2368
0.3000
0.2111
0.3456
0.3422
0.1576
[torch.FloatTensor of size 512]
), ('layer4.0.downsample.1.bias',
-0.1759
-0.2156
-0.2047
-0.1695
-0.1628
-0.1473
-0.2158
-0.2905
-0.1112
-0.2196
-0.1020
-0.1549
-0.1989
-0.0445
-0.1508
-0.1920
-0.2114
-0.1655
-0.1854
-0.1733
-0.1289
-0.2376
-0.1965
-0.1965
-0.1776
-0.1774
-0.1760
-0.1546
-0.1648
-0.2599
-0.1752
-0.2498
-0.1741
-0.2410
-0.2498
-0.2938
-0.1496
-0.1578
-0.1800
-0.1851
-0.1516
-0.1345
-0.2746
-0.1248
-0.2246
-0.2531
-0.2398
-0.1859
-0.1739
-0.2393
-0.1214
-0.1803
-0.2729
-0.2617
-0.1855
-0.2316
-0.2333
-0.1860
-0.2097
-0.0692
-0.1912
-0.2078
-0.1084
-0.2810
-0.1303
-0.1654
-0.2119
-0.3641
-0.2951
-0.2384
-0.1632
-0.1892
-0.1792
-0.2031
-0.1770
-0.2738
-0.3324
-0.1725
-0.1793
-0.2638
-0.2207
-0.1609
-0.1534
-0.1414
-0.2992
-0.1450
-0.1838
-0.1779
-0.1422
-0.2198
-0.1900
-0.1580
-0.1666
-0.2490
-0.1569
-0.1718
-0.1660
-0.1972
-0.2287
-0.2366
-0.2230
-0.1543
-0.2030
-0.1431
-0.1363
-0.2015
-0.1804
-0.2093
-0.2964
-0.1984
-0.2683
-0.2216
-0.2147
-0.3404
-0.2668
-0.1890
-0.1733
-0.2226
-0.1772
-0.1698
-0.1095
-0.2180
-0.1154
-0.1654
-0.1910
-0.3535
-0.3112
-0.2161
-0.1496
-0.1667
-0.2849
-0.2207
-0.1529
-0.1807
-0.2118
-0.1869
-0.1376
-0.1770
-0.1861
-0.1969
-0.1741
-0.3011
-0.0787
-0.2017
-0.1947
-0.2247
-0.2459
-0.1058
-0.1401
-0.1213
-0.1199
-0.1760
-0.2156
-0.3307
-0.3515
-0.2366
-0.1185
-0.2155
-0.1751
-0.1892
-0.3365
-0.1598
-0.2554
0.0644
-0.2856
-0.1198
-0.1583
-0.2297
-0.3352
-0.1987
-0.2686
-0.1632
-0.2461
-0.2900
-0.2428
-0.1449
-0.1900
-0.2149
-0.1541
-0.2917
-0.2504
-0.2213
-0.0463
-0.1547
-0.1511
-0.1527
-0.1735
-0.1931
-0.1987
-0.2239
-0.2086
-0.2688
-0.1845
-0.1797
-0.1833
-0.3880
-0.1539
-0.1553
-0.1567
-0.2238
-0.1511
-0.2540
-0.2849
-0.1826
-0.2687
-0.2328
-0.2108
-0.2410
-0.1022
-0.1507
-0.1978
-0.1734
-0.2282
-0.0985
-0.1847
-0.1770
-0.1576
-0.1937
-0.1643
-0.2822
-0.1866
-0.2754
-0.2266
-0.2169
-0.1352
-0.2194
-0.1060
-0.2139
-0.1322
-0.1889
-0.2130
-0.1913
-0.2364
-0.1402
-0.2228
-0.2354
-0.1632
-0.1905
-0.1428
-0.1177
-0.2419
-0.2733
-0.2963
-0.1600
-0.3558
-0.3673
-0.2201
-0.1505
-0.2084
-0.0870
-0.2052
-0.2070
-0.1986
-0.2299
-0.0745
-0.1765
-0.1412
-0.2180
-0.1450
-0.1426
-0.1452
-0.2916
-0.0871
-0.1359
-0.2003
-0.1125
-0.2588
-0.1988
-0.2028
-0.2443
-0.0864
-0.3415
-0.2579
-0.2343
-0.3552
-0.1859
-0.1153
-0.1732
-0.1780
-0.1909
-0.2018
-0.1886
-0.2751
-0.1501
0.1165
-0.1891
-0.1845
-0.2037
-0.0339
-0.3464
-0.1956
-0.1962
-0.1537
-0.1902
-0.1431
-0.3022
-0.1780
-0.1971
-0.2118
-0.0952
-0.1711
-0.2409
-0.2184
-0.2114
-0.2042
-0.0566
-0.0700
-0.2081
-0.1872
-0.2079
-0.1540
-0.2266
-0.1981
-0.1679
-0.2022
-0.2010
-0.1051
-0.1705
-0.2139
0.0396
-0.1077
-0.2745
-0.2690
-0.2603
-0.2819
-0.1917
-0.1940
-0.2944
-0.1822
-0.2903
-0.1064
-0.2076
-0.2648
-0.3032
-0.2878
-0.1579
-0.0071
-0.2142
-0.2022
-0.1516
-0.1123
0.0246
-0.0978
-0.1382
-0.1800
-0.3214
-0.2179
-0.1369
-0.0800
0.0117
-0.1839
-0.1926
-0.1614
-0.2769
-0.1909
-0.2101
-0.2305
-0.2055
-0.2017
-0.2741
-0.1005
-0.3152
-0.1121
-0.1700
-0.1364
-0.2157
-0.2673
-0.1584
-0.1997
-0.1745
-0.1886
-0.2307
-0.2024
-0.3376
-0.2266
-0.2355
-0.2133
-0.2346
-0.2412
-0.2358
-0.1265
-0.2341
-0.1887
-0.1646
-0.1417
-0.1882
-0.1076
-0.3048
-0.1162
-0.1651
-0.2046
-0.1833
-0.3102
-0.1778
-0.1575
-0.2676
-0.1777
-0.1569
-0.1741
-0.1892
-0.3028
-0.1457
-0.2179
-0.2226
-0.1609
-0.1423
-0.2683
-0.2920
-0.1740
-0.2079
-0.1940
-0.2679
-0.1973
-0.1951
-0.1665
-0.2286
-0.1903
-0.2667
-0.4010
-0.2550
-0.1817
-0.2025
-0.1589
-0.2476
-0.0573
-0.2203
-0.2084
-0.1587
-0.1212
-0.1795
-0.3449
-0.1662
-0.2523
-0.2435
-0.2878
-0.2797
-0.1897
-0.2113
-0.1943
-0.2050
-0.1694
-0.2243
-0.2987
-0.1328
-0.1428
-0.2399
-0.1593
-0.1999
-0.3225
-0.1860
-0.1763
-0.2691
-0.2097
-0.2396
-0.1140
-0.1897
-0.1870
-0.1829
-0.2615
-0.2073
-0.1858
-0.0598
-0.1915
-0.2183
-0.2088
-0.1742
-0.2715
-0.1999
-0.2117
-0.2492
-0.1717
-0.1566
-0.1669
-0.3015
-0.1685
-0.2434
-0.2297
-0.1947
-0.2860
-0.3288
-0.2197
-0.1862
-0.1755
-0.0987
-0.1756
-0.1304
-0.1555
-0.1679
-0.2222
-0.2819
-0.2652
-0.0947
-0.2412
-0.2731
-0.2572
-0.2604
-0.2934
-0.2470
-0.1820
-0.2740
-0.1336
-0.1698
-0.1919
-0.1796
-0.2325
-0.1352
-0.1077
-0.2184
-0.1539
-0.2015
-0.3243
-0.1713
[torch.FloatTensor of size 512]
), ('layer4.0.downsample.1.running_mean',
-0.1023
-0.1302
0.0169
0.0539
0.0531
-0.0650
-0.1681
-0.0962
0.0601
-0.0898
-0.0760
-0.0120
0.0480
-0.0867
-0.0415
-0.0887
-0.0378
-0.2376
-0.0965
-0.0434
0.0303
-0.2381
-0.0065
-0.0700
0.0606
0.0257
-0.1691
-0.1207
-0.1550
-0.0927
-0.0012
-0.0962
-0.0922
-0.1789
0.0146
-0.0498
0.0276
-0.1692
0.0259
0.0377
-0.0292
0.0140
-0.0638
-0.0831
0.1049
-0.0554
-0.0551
0.1332
0.0775
-0.1861
-0.0812
-0.2083
-0.0244
-0.0297
-0.0593
0.1243
-0.0475
-0.0014
-0.0069
-0.1002
-0.1040
-0.0837
0.0009
0.0259
-0.0490
-0.0631
0.0193
-0.0375
-0.0487
-0.0803
-0.1123
-0.1538
-0.1031
-0.0858
-0.0706
-0.0725
-0.0903
0.0075
-0.0850
-0.0287
0.0008
0.0249
-0.1068
-0.1237
-0.1271
0.0930
-0.0295
-0.0846
-0.0562
-0.1210
0.0103
-0.1118
-0.0407
-0.0110
-0.0512
-0.1326
0.0454
-0.1072
-0.1018
-0.1699
-0.0338
-0.0950
-0.1897
0.0623
-0.0210
0.0932
-0.0986
0.0823
-0.0911
0.0711
-0.1106
0.0176
-0.0164
-0.2472
-0.1185
-0.0477
-0.0651
0.1771
0.0150
-0.0449
-0.1536
0.0856
0.0214
-0.0775
-0.0115
-0.0189
0.0531
-0.0859
-0.0380
0.0722
-0.0279
0.0244
-0.0680
-0.0434
-0.0344
-0.1618
-0.0222
-0.0492
0.0432
-0.1546
-0.1090
-0.1352
-0.1276
0.0435
0.0177
-0.0656
-0.0611
0.2025
-0.0140
-0.1246
0.0086
-0.0182
0.0312
-0.1482
-0.1866
-0.0051
-0.0816
-0.1819
-0.0977
-0.0378
0.0002
-0.0387
-0.0635
0.1461
-0.1282
-0.0743
0.0310
-0.1228
-0.0425
0.0254
-0.0923
0.0005
-0.0166
-0.1357
-0.0625
0.0060
0.0374
-0.0008
-0.1331
-0.0414
-0.2023
-0.0162
-0.1962
-0.0725
0.0208
-0.0585
-0.1135
-0.1361
-0.1067
-0.1719
0.0145
-0.0390
0.0426
-0.1199
0.0811
-0.0991
-0.1779
-0.0845
0.0010
-0.0083
0.0078
-0.0986
-0.0941
0.0696
-0.1100
-0.1146
0.0178
-0.1711
-0.0144
-0.0282
0.0487
-0.0513
-0.0963
0.0386
-0.1037
0.0128
-0.0490
-0.0292
-0.0553
-0.1402
0.0022
-0.0791
-0.0174
0.0108
-0.0066
0.0250
0.0028
-0.0150
-0.1172
0.0335
0.0034
-0.1005
-0.1735
-0.1138
-0.0804
-0.0329
-0.0286
-0.2133
-0.0151
-0.0876
0.0146
-0.0277
-0.1421
0.0272
0.0350
-0.1483
-0.1306
-0.0596
-0.1365
-0.1003
-0.0083
-0.0906
-0.1012
-0.1426
0.0432
-0.0785
-0.0461
0.0157
0.0150
-0.1290
0.0685
-0.1478
-0.1259
-0.0573
0.0999
-0.0234
-0.1340
0.0173
0.1673
0.0693
0.0070
0.0203
-0.0508
-0.1397
-0.1292
-0.0331
0.0088
0.1208
-0.1808
-0.0149
-0.1302
0.0323
-0.0986
-0.0620
0.0781
0.0809
-0.0918
-0.0450
-0.1246
-0.0485
-0.0756
0.0692
-0.0382
-0.0063
-0.0477
-0.0603
-0.0485
-0.0355
-0.1025
-0.0634
0.1515
-0.1320
-0.0714
0.0402
-0.0342
-0.0085
0.0019
-0.0293
-0.1523
-0.0337
-0.0482
-0.0976
-0.0404
-0.0919
-0.0003
0.0222
-0.0552
-0.0686
0.0319
0.1502
-0.1174
-0.1299
0.0183
-0.0151
-0.1464
-0.0842
-0.0300
-0.0734
-0.0539
-0.1281
0.0408
-0.0897
-0.1408
0.0572
0.0280
-0.0091
-0.1038
-0.0243
-0.0847
-0.0224
-0.0027
-0.1154
-0.0466
0.0305
-0.1060
-0.0092
-0.0748
0.0004
-0.0888
-0.1423
-0.0397
-0.1643
-0.0451
0.0331
0.0008
-0.1542
0.0999
-0.0046
-0.0571
-0.0843
0.0550
-0.1814
-0.1387
-0.0335
0.0472
-0.0325
-0.0034
-0.0210
0.0393
0.0093
-0.0188
-0.0973
0.5186
0.0181
-0.0405
-0.0579
-0.0143
-0.0268
-0.0422
0.0041
-0.0778
-0.0486
0.0359
-0.1563
-0.0826
-0.1485
-0.0987
-0.0028
-0.0243
-0.0655
0.0076
-0.1397
-0.1042
-0.0823
-0.0552
0.0079
-0.0470
0.0660
-0.1063
-0.0572
-0.0552
-0.0801
-0.0892
0.0282
0.1233
-0.2059
-0.0203
-0.0241
0.0828
0.0044
-0.0312
-0.1715
0.0464
-0.0714
0.0321
-0.0967
-0.0669
-0.0344
-0.0770
0.0563
-0.1468
-0.0696
-0.0072
-0.0250
-0.0432
-0.0625
-0.0025
0.0089
-0.0822
-0.1244
0.0708
0.0160
-0.1348
-0.0627
-0.1054
0.1421
-0.0086
-0.0767
-0.1251
-0.0547
-0.1313
-0.0230
0.0155
-0.0489
-0.0013
0.0450
0.0332
-0.0467
-0.1055
0.0485
-0.1123
-0.0773
0.0066
-0.0378
-0.0175
-0.0315
0.0455
-0.1783
-0.0309
-0.0871
-0.0732
-0.0334
-0.0210
0.0869
-0.0567
0.0474
-0.1976
-0.0912
-0.1234
-0.0575
-0.0649
-0.0924
-0.0114
-0.0757
-0.1116
-0.0291
-0.0494
-0.0320
-0.1919
-0.0641
-0.0226
-0.1687
0.0051
-0.1272
0.0922
-0.0861
-0.0604
-0.1110
-0.0010
-0.0269
-0.1265
0.0806
-0.0886
0.0017
-0.0185
-0.0132
-0.0899
-0.1026
-0.0924
-0.0599
-0.0240
-0.0059
0.0808
-0.0403
-0.1129
0.0874
-0.0083
-0.1941
-0.1473
-0.0343
-0.0190
-0.0061
[torch.FloatTensor of size 512]
), ('layer4.0.downsample.1.running_var',
1.00000e-02 *
1.4797
3.7974
2.4287
3.3282
0.9573
1.2175
3.2409
2.3881
2.1434
1.2457
0.7617
1.4534
2.5956
0.8145
1.6107
1.6402
3.2195
3.4207
2.1148
2.1447
1.3177
3.4486
3.5753
2.3377
1.7639
0.4832
2.8323
1.9312
2.7409
1.7613
2.2178
4.2271
1.6393
1.5593
1.8405
2.1813
1.7244
2.2655
3.4637
1.7027
1.8760
2.6324
2.8153
2.7661
2.3234
1.0893
1.9429
4.7713
1.4600
2.0709
1.4191
2.7877
1.1790
1.8164
2.1013
3.3222
2.2192
2.3825
2.3330
0.9719
2.0101
3.5948
1.0746
1.9807
1.4409
2.5751
2.0639
0.8093
2.4897
1.6369
2.4380
1.5503
1.5689
0.4939
2.4048
1.2735
2.3085
2.6694
2.2144
1.9275
2.1960
1.9823
1.5991
1.2718
1.6494
1.1768
1.6908
2.4666
2.2209
1.7352
1.6695
2.0054
2.4338
2.7665
1.7877
1.7486
3.7940
1.5464
2.0674
1.2233
3.3432
1.5017
1.3682
2.8803
2.8052
3.1367
1.3165
2.6028
1.1627
4.7816
1.9174
1.1349
1.8531
4.4545
1.9873
2.8756
0.8903
5.2529
3.0137
1.7425
1.3181
1.3588
0.3196
1.6511
1.0533
1.7726
1.5709
0.8342
2.8195
1.3471
2.7743
1.2433
1.3966
2.0415
3.0947
2.9389
0.6835
1.7954
1.2979
2.2241
1.7859
2.9926
1.8215
1.1935
2.3874
2.4038
2.4009
3.7302
0.9383
2.6433
1.1903
0.8586
2.9513
1.8345
1.2961
1.6569
2.6276
2.5337
1.5654
2.1501
2.2197
2.3241
1.9577
6.7689
2.4607
0.4769
2.4836
1.6700
1.9427
2.3107
1.7138
1.8203
1.8894
2.4965
2.1896
2.5463
3.6071
1.5943
1.8717
0.6850
4.1029
2.3362
1.9808
1.2859
3.2598
3.0832
2.4954
1.2277
1.7085
1.5440
2.3446
2.9087
5.9264
2.4963
1.2804
2.0356
2.2088
3.2317
4.0116
0.9682
1.0246
2.2698
3.1086
1.7329
2.7773
2.2563
1.2499
2.8079
2.1120
2.3154
1.7160
1.1038
1.0535
1.8923
1.3009
1.4156
2.7678
2.4117
2.1302
1.4714
3.6207
2.8543
1.6743
1.8916
2.6885
1.0043
1.2075
1.4134
3.3789
2.0699
1.0101
1.8902
1.5572
4.9192
1.6134
2.8751
0.9142
4.2635
2.6588
0.4665
0.9972
0.5729
4.9139
2.4250
1.6319
1.4276
1.4179
2.4507
1.2122
2.0003
2.4153
2.7940
2.5884
2.0835
1.2927
2.4182
3.4140
2.1667
0.7899
2.9858
1.3404
2.1888
0.5248
2.4414
3.3217
1.7424
1.7588
2.2876
2.5777
3.6217
1.1590
1.5665
1.6886
2.4274
3.3398
1.6618
1.9122
1.7813
1.5589
1.7732
1.7904
1.9168
1.6683
3.3678
0.4529
1.5886
1.8173
2.2744
0.7121
1.2488
2.3408
3.1028
2.7164
1.5513
1.7717
1.6643
2.9922
2.2554
1.7378
1.4135
4.6231
2.3767
1.3142
2.4729
2.3066
2.3765
0.3310
2.0579
1.2455
1.6946
2.9351
1.9246
2.4107
2.7394
1.1762
1.1401
1.7944
2.3090
2.7987
0.4324
1.2802
1.0422
2.7148
3.0546
1.4914
2.2719
0.7397
1.6942
1.0857
1.4844
1.6265
2.8345
1.9868
1.5381
1.6695
2.1697
0.9911
1.8018
1.6002
1.0949
0.5767
0.5036
2.6319
3.0716
0.9113
1.0563
1.6398
3.0490
1.3609
0.6690
1.9067
1.7289
2.5994
1.5580
2.1489
2.6740
2.5944
2.3086
3.0448
1.7901
1.6307
1.8869
0.8179
1.2594
2.8673
2.7379
1.2914
4.2257
4.4290
0.4725
1.7098
1.0509
3.4835
1.0232
1.0880
0.9897
2.4268
3.1363
2.8433
2.3923
1.8523
2.2239
2.7958
2.1271
1.8237
1.6664
2.8019
2.1324
2.8550
1.7067
1.8597
1.5267
1.9043
1.0217
2.7563
2.8792
2.0045
1.8991
1.1335
2.2008
2.1896
1.9881
1.2837
2.2065
4.3280
1.8434
1.9879
1.2119
1.9007
0.9195
2.2533
0.9538
2.9914
1.1779
1.0417
2.5136
2.4045
1.4470
2.0585
2.0260
1.3212
1.7874
1.6841
1.8557
0.7608
2.6879
1.6277
1.3738
0.5450
1.2819
3.6177
0.8542
3.0353
1.1260
3.8203
1.9922
1.6696
2.0955
1.6163
0.9182
1.0645
1.6338
2.1920
2.2267
1.4893
2.3184
1.4378
1.3713
1.8360
2.0984
2.4619
2.3726
2.2280
1.8140
2.0319
0.3983
1.9480
2.7284
2.8425
2.3305
2.8359
0.6877
1.2102
1.1683
3.8713
1.6494
2.0911
1.9379
1.4409
1.5669
0.9922
3.0646
1.4635
1.4432
4.0783
1.7921
1.8565
1.4059
2.0364
2.4347
2.1271
1.6969
1.6637
1.1990
3.1063
0.5982
2.7176
2.5192
2.5004
1.0163
2.3461
2.9223
1.0756
1.7914
0.9306
2.5531
1.6042
1.2558
1.8730
1.3725
3.0774
1.1870
3.7628
1.7584
1.7254
2.6002
1.8345
1.8618
1.3726
3.4435
1.4385
1.1154
[torch.FloatTensor of size 512]
), ('layer4.1.conv1.weight',
( 0 , 0 ,.,.) =
-8.0284e-03 -5.7776e-03 6.4154e-03
5.0498e-03 -6.7796e-03 1.2691e-02
1.3331e-02 1.4523e-02 2.4522e-02
( 0 , 1 ,.,.) =
-1.9876e-03 1.2466e-02 1.0494e-02
-1.9364e-02 -1.6696e-02 -1.1857e-02
-1.1569e-02 -3.7674e-03 -3.4679e-03
( 0 , 2 ,.,.) =
-1.1440e-02 -1.3884e-02 1.1559e-03
-1.7906e-02 -2.9349e-02 -1.3876e-02
-1.4057e-02 -2.6989e-02 -2.3963e-02
...
( 0 ,509,.,.) =
-6.3040e-03 -3.1167e-03 -1.3304e-02
7.1623e-03 6.4669e-03 1.6063e-02
-1.0750e-02 -1.0480e-02 -6.1070e-03
( 0 ,510,.,.) =
7.4484e-03 6.3878e-03 -1.2579e-02
-7.7356e-03 1.8112e-03 -1.7890e-02
-2.9142e-03 7.7705e-03 -9.7314e-03
( 0 ,511,.,.) =
2.1760e-02 2.2364e-02 2.2731e-02
2.6681e-02 2.9127e-02 3.3356e-02
1.2892e-02 -3.5818e-03 5.3022e-03
⋮
( 1 , 0 ,.,.) =
-1.0597e-02 -9.1551e-03 -2.3418e-02
-1.0768e-02 -3.3171e-03 -1.8559e-02
-1.8607e-02 -4.2634e-03 -1.5591e-02
( 1 , 1 ,.,.) =
-2.6090e-02 -2.2517e-02 -3.0593e-02
-3.9406e-02 -2.6639e-02 -2.8202e-02
-2.6143e-02 -1.9647e-02 -2.1466e-02
( 1 , 2 ,.,.) =
-3.5259e-03 1.6623e-03 -6.5624e-03
-5.0597e-03 -8.7162e-04 -5.3742e-03
-7.9651e-03 -9.7778e-03 -1.0736e-02
...
( 1 ,509,.,.) =
1.8492e-02 -3.6799e-03 1.0043e-02
-5.2974e-03 -2.0757e-02 -1.5120e-02
2.1435e-02 6.4916e-03 4.7660e-03
( 1 ,510,.,.) =
-1.8810e-02 -6.0469e-04 -7.6999e-03
-1.7697e-02 -7.8692e-03 -1.6543e-02
-1.7206e-02 -2.4746e-02 -3.0270e-02
( 1 ,511,.,.) =
-3.1191e-02 -1.4363e-02 2.2032e-03
-1.2033e-02 -2.3699e-03 -1.6630e-02
-1.2905e-02 -1.5363e-02 -3.6297e-03
⋮
( 2 , 0 ,.,.) =
-3.2648e-02 -4.8158e-03 -2.0476e-02
-2.5846e-02 -1.4660e-03 -2.8170e-02
-2.6640e-02 4.3022e-03 -2.7636e-02
( 2 , 1 ,.,.) =
-6.3289e-03 -1.5401e-02 -1.3096e-03
-1.7499e-02 -2.6212e-02 -2.3646e-02
-7.3207e-03 -1.5592e-02 -8.9578e-03
( 2 , 2 ,.,.) =
8.9701e-04 -6.6914e-03 -5.3129e-03
-1.1727e-03 -1.0726e-02 -9.0103e-03
3.2311e-03 -4.5854e-03 4.3512e-03
...
( 2 ,509,.,.) =
-2.1822e-02 -3.6889e-02 -2.2588e-02
-1.3054e-02 -3.4191e-02 -2.7238e-02
-1.2383e-02 -2.3452e-02 -2.2486e-02
( 2 ,510,.,.) =
6.8177e-03 2.1561e-02 1.3674e-02
3.1192e-03 1.0660e-02 1.0409e-02
8.0477e-03 -4.6817e-03 -4.3912e-03
( 2 ,511,.,.) =
-1.1983e-02 -1.6201e-02 -2.2626e-02
-1.3461e-02 -7.0928e-03 -1.4384e-02
-2.4456e-02 1.4885e-02 1.2247e-02
...
⋮
(509, 0 ,.,.) =
-2.6347e-02 -2.9923e-02 -3.7810e-02
-1.5663e-02 -4.1126e-03 -1.1482e-02
-1.3415e-02 -1.5432e-02 -1.8204e-02
(509, 1 ,.,.) =
-3.8392e-03 -1.1093e-02 -8.0841e-04
-5.9634e-03 -5.9165e-03 -9.3332e-03
-2.2761e-03 5.4781e-03 -5.6050e-03
(509, 2 ,.,.) =
-1.8406e-03 -2.8134e-03 8.3246e-03
-1.2453e-03 2.1453e-04 7.4868e-03
1.3450e-02 3.0599e-02 2.6405e-02
...
(509,509,.,.) =
3.5268e-04 2.3897e-03 6.2558e-03
-1.4338e-02 -2.3146e-02 -1.9024e-02
-2.7306e-02 -3.0079e-02 -3.1762e-02
(509,510,.,.) =
1.4584e-02 4.3430e-03 1.2053e-02
-6.1130e-03 -2.8539e-02 -1.8268e-02
-1.6844e-02 -4.7816e-02 -2.6274e-02
(509,511,.,.) =
-1.8850e-02 -9.3396e-03 7.8905e-03
-1.5322e-03 8.3153e-03 1.7783e-02
-8.3318e-03 -1.5759e-02 -1.2061e-02
⋮
(510, 0 ,.,.) =
9.9578e-03 7.4573e-03 -1.8738e-03
-1.7752e-03 -6.8015e-04 -7.4443e-03
-1.8319e-02 -1.4264e-02 -7.1446e-03
(510, 1 ,.,.) =
7.8524e-03 -2.6520e-03 -1.7556e-02
4.5240e-03 -4.8661e-03 -1.5215e-02
-5.0211e-03 -1.1864e-02 -1.4846e-02
(510, 2 ,.,.) =
2.9163e-02 1.0344e-02 2.4736e-02
1.2012e-02 -1.0346e-02 3.5472e-03
8.2238e-03 -1.8237e-02 -5.4892e-03
...
(510,509,.,.) =
-8.8434e-03 -4.3184e-03 -5.7536e-03
7.7230e-03 -4.1936e-04 7.7260e-03
1.3536e-02 1.5705e-02 2.0893e-02
(510,510,.,.) =
1.6743e-03 1.9720e-03 2.1567e-02
-8.0074e-03 -4.6606e-03 4.0560e-03
-1.6688e-02 -1.3754e-02 -1.1708e-02
(510,511,.,.) =
-9.7959e-03 -9.4502e-03 -9.3443e-03
6.9547e-03 -3.9134e-05 6.2691e-03
-1.3193e-02 9.3272e-04 1.4579e-02
⋮
(511, 0 ,.,.) =
-1.4963e-03 5.5133e-04 1.1571e-02
1.0174e-02 1.7889e-03 1.1035e-02
7.0212e-03 1.4651e-03 1.2769e-03
(511, 1 ,.,.) =
-1.3021e-02 6.4109e-03 -1.5199e-02
2.4775e-02 2.1926e-02 3.3679e-02
2.6471e-04 -3.0235e-03 1.1690e-02
(511, 2 ,.,.) =
-2.9665e-02 -1.5314e-02 -1.7500e-02
-1.8339e-02 -2.0845e-02 -1.5494e-02
-1.6086e-03 1.0831e-02 -1.4309e-02
...
(511,509,.,.) =
-7.7044e-03 -2.1100e-02 -2.2816e-02
5.7688e-03 1.9362e-04 7.7105e-04
-6.1357e-03 9.7275e-03 -2.5464e-03
(511,510,.,.) =
1.1043e-02 2.4205e-02 3.4213e-02
2.9181e-02 2.6904e-02 4.5372e-02
-2.1594e-02 -1.1072e-03 -7.8312e-03
(511,511,.,.) =
-8.3287e-03 -7.9521e-03 -5.3358e-03
-6.2527e-04 -5.3243e-03 -8.6296e-03
3.6094e-03 -1.2544e-03 -4.3801e-03
[torch.FloatTensor of size 512x512x3x3]
), ('layer4.1.bn1.weight',
0.2587
0.3073
0.2595
0.3223
0.2662
0.2652
0.2575
0.2660
0.2766
0.2414
0.3045
0.2853
0.2821
0.2880
0.3094
0.3444
0.3155
0.4129
0.2110
0.2903
0.2496
0.2601
0.2967
0.3033
0.4152
0.2719
0.3661
0.3251
0.3898
0.3346
0.2753
0.2712
0.2414
0.3351
0.3394
0.3167
0.3360
0.2666
0.2109
0.2705
0.2587
0.3070
0.2720
0.2316
0.2885
0.2884
0.2955
0.3057
0.3043
0.2596
0.2673
0.1929
0.3136
0.3593
0.2622
0.2931
0.3295
0.2514
0.3208
0.2798
0.3259
0.2939
0.2390
0.3105
0.3471
0.2812
0.2148
0.2997
0.3061
0.2740
0.2791
0.3790
0.3592
0.3247
0.2995
0.2735
0.3356
0.2703
0.3255
0.3127
0.2783
0.2702
0.3900
0.2942
0.2899
0.3461
0.3432
0.4685
0.2634
0.2553
0.3019
0.3961
0.2742
0.2995
0.3858
0.2785
0.3212
0.3109
0.3642
0.2193
0.2643
0.2333
0.3151
0.3102
0.2936
0.2374
0.2419
0.2976
0.3335
0.2619
0.3984
0.2721
0.2718
0.2678
0.2757
0.2445
0.3508
0.2174
0.3309
0.2653
0.2564
0.1748
0.3177
0.2751
0.2067
0.2905
0.2762
0.3329
0.2738
0.3224
0.2199
0.2997
0.2206
0.3213
0.2760
0.3927
0.3174
0.2698
0.2988
0.2610
0.2550
0.2788
0.4445
0.2862
0.3606
0.3279
0.2869
0.3294
0.2244
0.2338
0.1754
0.2318
0.3186
0.3322
0.2255
0.3041
0.2837
0.3276
0.2392
0.3668
0.1971
0.2946
0.3613
0.2736
0.2554
0.2860
0.2511
0.3490
0.3253
0.2934
0.2027
0.2580
0.2200
0.3089
0.3074
0.3332
0.2943
0.3375
0.2330
0.2611
0.3383
0.2837
0.3546
0.3093
0.3791
0.2197
0.2648
0.2830
0.2587
0.3588
0.2830
0.3971
0.3194
0.3066
0.2754
0.2647
0.0970
0.2182
0.2334
0.2624
0.1829
0.2933
0.2747
0.3001
0.2996
0.3107
0.3256
0.2940
0.3901
0.2790
0.3030
0.2838
0.3010
0.3044
0.3479
0.3087
0.2611
0.1958
0.2941
0.2558
0.2889
0.3148
0.2516
0.2664
0.2862
0.3940
0.2933
0.2781
0.3796
0.3022
0.2583
0.3021
0.2784
0.2967
0.2994
0.3856
0.3277
0.2587
0.2539
0.2824
0.2634
0.1489
0.2205
0.3929
0.3401
0.2717
0.2789
0.2917
0.3177
0.1992
0.3684
0.3120
0.3201
0.2810
0.2302
0.2779
0.2865
0.2858
0.2713
0.1601
0.2496
0.2895
0.3154
0.3443
0.3285
0.3444
0.3251
0.3235
0.3375
0.2282
0.2128
0.1795
0.3077
0.3005
0.2775
0.3054
0.2914
0.3535
0.2871
0.2669
0.3961
0.2674
0.3898
0.3183
0.3242
0.2789
0.1911
0.2569
0.3427
0.2464
0.2778
0.2098
0.3019
0.3145
0.3271
0.2914
0.2619
0.2643
0.3039
0.2520
0.2099
0.3643
0.2915
0.1957
0.3286
0.2355
0.3210
0.2982
0.3388
0.3450
0.3716
0.2898
0.2846
0.2805
0.2219
0.2910
0.2681
0.3163
0.1964
0.3176
0.3092
0.2706
0.2505
0.2508
0.3166
0.3583
0.1563
0.2608
0.2892
0.3401
0.2891
0.3126
0.2172
0.2459
0.2651
0.4052
0.2986
0.3026
0.3773
0.2262
0.2675
0.2900
0.3759
0.3201
0.2567
0.3443
0.2348
0.3057
0.2347
0.3277
0.2938
0.2746
0.2805
0.2421
0.3590
0.2622
0.2773
0.2396
0.2134
0.2727
0.2984
0.2744
0.2591
0.2628
0.3568
0.2009
0.3220
0.2868
0.2561
0.3113
0.2138
0.3136
0.2745
0.3046
0.3042
0.1972
0.2815
0.2542
0.2983
0.2613
0.2668
0.3142
0.2930
0.3800
0.1966
0.2948
0.3363
0.2713
0.3625
0.2909
0.2695
0.3111
0.3242
0.3009
0.3231
0.3051
0.2012
0.2716
0.3692
0.2694
0.1481
0.2858
0.2819
0.2391
0.2867
0.3466
0.3431
0.2365
0.3357
0.1685
0.2925
0.3092
0.3127
0.1883
0.2561
0.3086
0.1732
0.2989
0.3235
0.2693
0.2630
0.2913
0.2786
0.3124
0.3098
0.2695
0.2403
0.2906
0.2784
0.2654
0.3485
0.3939
0.3033
0.3145
0.2622
0.1540
0.2790
0.2967
0.1954
0.2632
0.2957
0.2581
0.3231
0.2795
0.2859
0.3139
0.2488
0.2404
0.3714
0.2649
0.2267
0.2878
0.3462
0.3063
0.3180
0.1726
0.3153
0.2625
0.3020
0.2996
0.3632
0.1541
0.3192
0.2200
0.2894
0.2622
0.2534
0.2935
0.3208
0.2231
0.2743
0.3023
0.2829
0.2394
0.2506
0.3512
0.3366
0.2666
0.2930
0.3049
0.2321
0.3397
0.2727
0.2900
0.3146
0.2682
0.3094
0.3718
0.3387
0.3202
0.2423
0.2745
0.2966
0.2500
0.2329
0.3419
0.2928
0.3536
0.3739
0.1935
0.2670
0.2846
0.2583
0.3783
0.2826
0.2929
0.2728
0.3645
0.2770
0.2756
0.2523
0.2500
[torch.FloatTensor of size 512]
), ('layer4.1.bn1.bias',
-0.1668
-0.3019
-0.2187
-0.2917
-0.1971
-0.2325
-0.1869
-0.1857
-0.2474
-0.1629
-0.2448
-0.2508
-0.1895
-0.2651
-0.3250
-0.3811
-0.2953
-0.4963
-0.0294
-0.2724
-0.2007
-0.2220
-0.2945
-0.2579
-0.5152
-0.1994
-0.5016
-0.2736
-0.4528
-0.3968
-0.2281
-0.1772
-0.1293
-0.2655
-0.3252
-0.3232
-0.3337
-0.1901
-0.0692
-0.2196
-0.2132
-0.2565
-0.1646
-0.1567
-0.2087
-0.2178
-0.2480
-0.2767
-0.3071
-0.1988
-0.1985
-0.0235
-0.2458
-0.4156
-0.1660
-0.1923
-0.3328
-0.1481
-0.3047
-0.2277
-0.3182
-0.2744
-0.1643
-0.3365
-0.4050
-0.2082
-0.0621
-0.2671
-0.2809
-0.2185
-0.2148
-0.4465
-0.3376
-0.3213
-0.2921
-0.1998
-0.3369
-0.2092
-0.2831
-0.2893
-0.1719
-0.2189
-0.4016
-0.2484
-0.2070
-0.3849
-0.3753
-0.5874
-0.1637
-0.1748
-0.2217
-0.5067
-0.2496
-0.2117
-0.4291
-0.1944
-0.3089
-0.2621
-0.4096
-0.0602
-0.2009
-0.1316
-0.3336
-0.2627
-0.2320
-0.0910
-0.1560
-0.2889
-0.3286
-0.1628
-0.5128
-0.2036
-0.1726
-0.1844
-0.2285
-0.1925
-0.3432
-0.0929
-0.3138
-0.1912
-0.1926
-0.0342
-0.3268
-0.1699
-0.0828
-0.2417
-0.2069
-0.3870
-0.2210
-0.2867
-0.0526
-0.3092
-0.0655
-0.2594
-0.2160
-0.5062
-0.2905
-0.2125
-0.3124
-0.2128
-0.1946
-0.2520
-0.5475
-0.2321
-0.3350
-0.3473
-0.2158
-0.3603
-0.0759
-0.1472
-0.0327
-0.1404
-0.3128
-0.3063
-0.1120
-0.2664
-0.2700
-0.3112
-0.1519
-0.3843
-0.0645
-0.2373
-0.4227
-0.2546
-0.1611
-0.2350
-0.1524
-0.3494
-0.3453
-0.2081
-0.0918
-0.2025
-0.1246
-0.2533
-0.2768
-0.3156
-0.2530
-0.3957
-0.0981
-0.1257
-0.3697
-0.2333
-0.3664
-0.2829
-0.4320
-0.0836
-0.1583
-0.2395
-0.1818
-0.4408
-0.2376
-0.4450
-0.3232
-0.2787
-0.1858
-0.2137
0.0481
-0.1058
-0.1093
-0.2035
-0.0496
-0.2117
-0.1598
-0.2389
-0.2830
-0.2878
-0.3406
-0.2560
-0.4468
-0.2444
-0.2492
-0.2222
-0.2792
-0.3005
-0.4180
-0.2568
-0.1872
-0.0270
-0.2645
-0.1873
-0.3022
-0.3400
-0.1803
-0.1810
-0.2079
-0.4775
-0.2047
-0.1878
-0.4504
-0.2516
-0.1657
-0.2765
-0.2329
-0.2446
-0.2956
-0.4163
-0.2816
-0.1571
-0.2199
-0.2125
-0.1684
0.0356
-0.0914
-0.4484
-0.3535
-0.2212
-0.2550
-0.2509
-0.2702
-0.0599
-0.3505
-0.2924
-0.2360
-0.2339
-0.1259
-0.2597
-0.2267
-0.1978
-0.1371
-0.0129
-0.1175
-0.2527
-0.3099
-0.3231
-0.3468
-0.3553
-0.3537
-0.3315
-0.3713
-0.1091
-0.0959
-0.0258
-0.2756
-0.2808
-0.2012
-0.2812
-0.1991
-0.3948
-0.2257
-0.2469
-0.4211
-0.2110
-0.4670
-0.3069
-0.3549
-0.2337
-0.0612
-0.1321
-0.2968
-0.1870
-0.2316
-0.0686
-0.3113
-0.2895
-0.3149
-0.2686
-0.2081
-0.2096
-0.3011
-0.1810
-0.0227
-0.3873
-0.2665
-0.0225
-0.2973
-0.0973
-0.2980
-0.3219
-0.2926
-0.3196
-0.4332
-0.1980
-0.2117
-0.2302
-0.0980
-0.2344
-0.2154
-0.2921
-0.0350
-0.3361
-0.2620
-0.2188
-0.1566
-0.1795
-0.2726
-0.4103
0.0413
-0.1507
-0.2552
-0.3137
-0.2466
-0.2961
-0.0938
-0.1481
-0.2129
-0.5480
-0.2915
-0.2802
-0.5077
-0.1306
-0.1862
-0.2400
-0.4362
-0.3017
-0.1633
-0.3447
-0.1047
-0.2846
-0.1244
-0.3036
-0.2404
-0.2333
-0.2494
-0.1866
-0.3294
-0.1677
-0.2540
-0.1295
-0.0512
-0.1966
-0.2801
-0.1702
-0.1879
-0.1850
-0.3274
-0.0369
-0.2979
-0.2612
-0.1889
-0.3270
-0.1377
-0.2787
-0.2201
-0.2417
-0.2834
-0.0555
-0.2538
-0.1040
-0.2660
-0.1644
-0.1723
-0.2672
-0.2797
-0.4214
-0.0378
-0.2386
-0.3498
-0.2435
-0.4348
-0.2554
-0.1719
-0.2836
-0.3316
-0.2787
-0.2879
-0.2640
-0.0560
-0.1789
-0.4195
-0.2152
0.0567
-0.2359
-0.2249
-0.0911
-0.2644
-0.3875
-0.3317
-0.1415
-0.3425
-0.0020
-0.1941
-0.2821
-0.2809
-0.0965
-0.1841
-0.2971
-0.0173
-0.3043
-0.3013
-0.1729
-0.1872
-0.2683
-0.2033
-0.3059
-0.2939
-0.2163
-0.1889
-0.2581
-0.2296
-0.2066
-0.3462
-0.4298
-0.2600
-0.3095
-0.1800
-0.0116
-0.2124
-0.2552
-0.0523
-0.2216
-0.2605
-0.2134
-0.2867
-0.2556
-0.2275
-0.3437
-0.1698
-0.1560
-0.4120
-0.2067
-0.1159
-0.2408
-0.3093
-0.2621
-0.2593
-0.0135
-0.3099
-0.2179
-0.2766
-0.2400
-0.3934
0.0072
-0.2982
-0.0930
-0.2166
-0.1635
-0.1827
-0.2308
-0.2525
-0.0991
-0.2325
-0.2938
-0.2480
-0.0934
-0.1911
-0.3772
-0.3369
-0.1606
-0.2752
-0.3005
-0.1372
-0.2990
-0.2156
-0.2622
-0.3160
-0.1342
-0.2903
-0.3865
-0.2916
-0.3243
-0.2051
-0.2656
-0.2359
-0.1508
-0.1063
-0.3595
-0.2312
-0.3046
-0.4178
-0.0276
-0.2204
-0.2426
-0.1616
-0.4789
-0.1713
-0.2802
-0.2305
-0.4327
-0.2413
-0.1862
-0.1486
-0.1507
[torch.FloatTensor of size 512]
), ('layer4.1.bn1.running_mean',
-0.6163
-0.7110
-0.4544
-0.6143
-0.7875
-0.5289
-0.6214
-0.5824
-0.4067
-0.5256
-0.5125
-0.5337
-0.5658
-0.6845
-0.5146
-0.6323
-0.5391
-0.6838
-0.6577
-0.4716
-0.5249
-0.4059
-0.6028
-0.5246
-0.8913
-0.6528
-0.5048
-0.5008
-0.5067
-0.5973
-0.5524
-0.6383
-0.6782
-0.7263
-0.5725
-0.6456
-0.7138
-0.6146
-0.5346
-0.5177
-0.4593
-0.6618
-0.5113
-0.5810
-0.6244
-0.5531
-0.5563
-0.5481
-0.6089
-0.4626
-0.7311
-0.6632
-0.6316
-0.7381
-0.6137
-0.6237
-0.4964
-0.4489
-0.6428
-0.5968
-0.5530
-0.3822
-0.6110
-0.4530
-0.4679
-0.5221
-0.6356
-0.4861
-0.7772
-0.5096
-0.5782
-0.7054
-0.6667
-0.6244
-0.7292
-0.7311
-0.6557
-0.6871
-0.6767
-0.5567
-0.6059
-0.6680
-0.7162
-0.6105
-0.5778
-0.6501
-0.6248
-1.0751
-0.6764
-0.7277
-0.5838
-0.5172
-0.6578
-0.6765
-0.7828
-0.5213
-0.5852
-0.6051
-0.6174
-0.6495
-0.7123
-0.6940
-0.5532
-0.6595
-0.5406
-0.7931
-0.5718
-0.5847
-0.6132
-0.6935
-0.6868
-0.6694
-0.5328
-0.6436
-0.6820
1.0943
-0.7218
-0.5527
-0.6364
-0.7036
-0.4954
-0.7242
-0.5977
-0.4918
-0.6130
-0.5662
-0.5606
-0.5216
-0.6229
-0.6632
-0.6878
-0.5284
-0.6767
-0.6877
-0.4909
-0.5646
-0.5312
-0.5946
-0.4761
-0.4790
-0.4377
-0.5075
-0.7755
-0.6303
-0.7138
-0.6351
-0.4867
-0.6949
-0.5841
-0.5315
-0.7750
-0.4143
-0.6275
-0.7366
-0.4332
-0.5265
-0.5596
-0.7054
-0.5708
-0.6828
-0.5689
-0.6370
-0.6888
-0.6580
-0.7045
-0.5881
-0.5664
-0.7268
-0.4533
-0.5892
-0.3759
-0.5606
-0.4196
-0.6223
-0.5858
-0.6233
-0.5599
-0.6126
-0.6092
-0.5808
-0.6016
-0.6788
-0.6251
-0.5953
-0.6111
-0.5539
-0.6734
-0.7272
-0.6097
-0.5319
-0.6154
-0.6616
-0.5251
-0.7204
-0.5141
-0.5327
0.4233
-0.5529
-0.5242
-0.6593
-0.4511
-0.6349
-0.3456
-0.6631
-0.5920
-0.5973
-0.6211
-0.7120
-0.6314
-0.5040
-0.6516
-0.6550
-0.7142
-0.5808
-0.6789
-0.6201
-0.4061
-0.5925
-0.5558
-0.5571
-0.4889
-0.5365
-0.5812
-0.4340
-0.6515
-0.7659
-0.7258
-0.6003
-0.5486
-0.5736
-0.6739
-0.7918
-0.7040
-0.7296
-0.5405
-0.7658
-0.7979
-0.4340
-0.4951
-0.6078
-0.5947
-0.4997
-0.6277
-0.7792
-0.5649
-0.5368
-0.5487
-0.4484
-0.5827
-0.6600
-0.6217
-0.4814
-0.5351
-0.7170
-0.6168
-0.5111
-0.5243
-0.5335
-0.5807
-0.5547
-0.6503
-0.4587
-0.5209
-0.6062
-0.6173
-0.5516
-0.6006
-0.6239
-0.5478
-0.4163
-0.4006
-0.6529
-0.6296
-0.6285
-0.6954
-0.6962
-0.6200
-0.6101
-0.7406
-0.5545
-0.6118
-0.4561
-0.7019
-0.4928
-0.6741
-0.5356
-0.5084
-0.5760
-0.7814
-0.6067
-0.5649
-0.7067
-0.5240
-0.5461
-0.6110
-0.4931
-0.5836
-0.5622
-0.6459
-0.6308
-0.9025
-0.7854
-0.5653
-0.7293
-0.5926
-0.5592
-0.7440
-0.7482
-0.6664
-0.7048
-0.7439
-0.5878
-0.8028
-0.6192
-0.4616
-0.5855
-0.6742
-0.7888
-0.6651
-0.3683
-0.6134
-0.4842
-0.3682
-0.6536
-0.7631
-0.6092
-0.4839
-0.6507
-0.3905
-0.6862
-0.4401
-0.6525
-0.5656
-0.6717
-0.7421
-0.7229
-0.4674
-0.5751
-0.5511
-0.5586
-0.5558
-0.4956
-0.7403
-0.6855
-0.3486
-0.6537
-0.4844
-0.6349
-0.5598
-0.5948
-0.4812
-0.5334
-0.5976
-0.6261
-0.6772
-0.7516
-0.5493
-0.3516
-0.5009
-0.6872
-0.5960
-0.7015
-0.7018
-0.6167
-0.6467
-0.6626
-0.6280
-0.5683
-0.4376
-0.5755
-0.4337
-0.7240
-0.6438
-0.7462
-0.5832
-0.4346
-0.4831
-0.7087
-0.6292
-0.5748
-0.6452
-0.6257
-0.5396
-0.6186
-0.6352
-0.8040
-0.6863
-0.5090
-0.5783
-0.4635
-0.6626
-0.6057
-0.5455
-0.6903
-0.4256
-0.6280
-0.6005
-0.6254
-0.6561
-0.6311
0.0700
-0.5386
-0.6303
-0.6126
-0.5625
-0.7177
-0.7150
-0.4435
-0.5034
-0.4601
-0.7134
-0.6045
-0.6175
-0.1311
-0.4606
-0.5279
-0.4703
-0.6170
-0.5399
-0.6533
-0.4893
-0.6054
-0.5828
-0.4321
-0.5590
-0.6121
-0.4322
-0.5652
-0.5552
-0.5527
-0.7014
-0.8307
-0.7236
-0.5809
-0.5889
-0.3087
-0.5081
-0.6251
-0.5052
-0.5977
-0.4824
-0.5811
-0.5692
-0.5516
-0.7365
-0.5498
-0.6327
-0.5069
-0.7376
-0.6093
-0.4489
-0.4150
-0.8244
-0.6467
-0.6521
-0.7903
-0.5506
-0.5928
-0.5616
-0.5244
-0.7687
-0.4464
-0.5756
-0.6635
-0.8333
-0.5849
-0.6492
-0.6747
-0.5918
-0.7004
-0.5709
-0.6782
-0.5677
-0.5274
-0.6032
-0.7329
-0.7639
-0.7142
-0.5434
-0.5954
-0.3667
-0.8357
-0.5958
-0.5528
-0.7045
-0.7305
-0.5834
-0.6888
-0.6295
-0.4671
-0.4950
-0.3886
-0.7052
-0.7428
-0.4397
-0.6197
-0.6044
-0.7213
-0.7726
-0.5705
-0.5343
-0.6056
-0.5059
-0.7181
-0.6812
-0.6400
-0.6280
-0.6755
-0.6645
-0.6709
-0.6787
-0.7700
[torch.FloatTensor of size 512]
), ('layer4.1.bn1.running_var',
0.1573
0.1215
0.1109
0.1383
0.1542
0.1192
0.1380
0.1565
0.1040
0.1227
0.1210
0.1088
0.1507
0.1229
0.1182
0.1031
0.1236
0.1420
0.2725
0.1137
0.1290
0.1197
0.1203
0.1205
0.1339
0.1468
0.0918
0.1423
0.1281
0.1053
0.1406
0.1438
0.1694
0.1591
0.1215
0.1267
0.1491
0.1544
0.1516
0.1206
0.1409
0.1487
0.1340
0.1212
0.1692
0.1352
0.1200
0.1342
0.1134
0.1254
0.1213
0.1793
0.1328
0.1295
0.1582
0.1491
0.1182
0.1430
0.1205
0.1282
0.1373
0.1034
0.1206
0.0871
0.0848
0.1330
0.2006
0.1261
0.1236
0.1034
0.1263
0.1355
0.1411
0.1224
0.1370
0.1594
0.1325
0.1217
0.1415
0.1338
0.1612
0.1261
0.1384
0.1487
0.1365
0.1152
0.1112
0.2107
0.1501
0.1499
0.1466
0.1247
0.1303
0.1796
0.1399
0.1382
0.1268
0.1446
0.1197
0.1814
0.1356
0.1555
0.1191
0.1518
0.1481
0.1677
0.1664
0.1375
0.1302
0.1575
0.1044
0.1489
0.1323
0.1657
0.1363
0.1650
0.1388
0.1572
0.1097
0.1532
0.1118
0.1582
0.1240
0.1371
0.1479
0.1441
0.1202
0.1072
0.1245
0.1441
0.1704
0.1055
0.1728
0.1529
0.1170
0.1075
0.1315
0.1389
0.1054
0.1395
0.1459
0.1136
0.1329
0.1357
0.1532
0.1205
0.1302
0.1067
0.1683
0.1299
0.1858
0.1210
0.1353
0.1287
0.1354
0.1247
0.1327
0.1452
0.1283
0.1384
0.1480
0.1390
0.1259
0.1230
0.1613
0.1562
0.1297
0.1444
0.1217
0.1486
0.1254
0.1371
0.1776
0.1259
0.1266
0.1432
0.1145
0.1126
0.1514
0.1623
0.1124
0.1401
0.1290
0.1095
0.1299
0.1432
0.1534
0.1387
0.1410
0.0994
0.1383
0.1388
0.1094
0.1412
0.1450
0.1345
0.1807
0.1669
0.1797
0.1446
0.1243
0.1500
0.1966
0.1428
0.1178
0.1329
0.1239
0.1487
0.1447
0.1233
0.1230
0.1421
0.1380
0.1261
0.1041
0.1362
0.1259
0.1976
0.1114
0.1175
0.0917
0.1185
0.1305
0.1413
0.1461
0.1311
0.1972
0.1353
0.1068
0.1346
0.1844
0.1347
0.1281
0.1329
0.1151
0.1365
0.1553
0.1578
0.1092
0.1538
0.1433
0.1616
0.1567
0.1194
0.1202
0.1098
0.1202
0.1117
0.1285
0.1747
0.1419
0.1255
0.1801
0.1364
0.1383
0.1418
0.1395
0.1644
0.1854
0.1917
0.1479
0.1276
0.1312
0.1268
0.1060
0.1379
0.1168
0.1260
0.1124
0.1565
0.1202
0.2099
0.1336
0.1320
0.1521
0.1538
0.1600
0.1196
0.1402
0.1164
0.1405
0.1027
0.1246
0.1134
0.1128
0.1233
0.1620
0.1926
0.1764
0.1251
0.1078
0.1889
0.1207
0.1188
0.1135
0.1182
0.1275
0.1302
0.1226
0.1329
0.2516
0.1389
0.1333
0.1921
0.1150
0.1574
0.1364
0.1345
0.1353
0.1439
0.1305
0.1363
0.2558
0.1277
0.1620
0.1404
0.1460
0.1436
0.2171
0.1201
0.1373
0.1306
0.1311
0.1308
0.1287
0.1161
0.2168
0.1480
0.1095
0.1442
0.1021
0.1147
0.1490
0.1643
0.1194
0.1053
0.1203
0.1042
0.1046
0.1572
0.1197
0.1417
0.1291
0.1438
0.1530
0.1424
0.1470
0.1178
0.1345
0.1161
0.1184
0.1133
0.1149
0.1338
0.1574
0.1732
0.1222
0.1317
0.1651
0.1592
0.1255
0.1714
0.1386
0.1274
0.1710
0.1602
0.1427
0.1191
0.1423
0.1244
0.1242
0.1345
0.1228
0.1578
0.1384
0.1870
0.1142
0.1692
0.1325
0.1416
0.1499
0.1259
0.1202
0.1246
0.1744
0.1611
0.1277
0.1228
0.1276
0.1105
0.1708
0.1367
0.1220
0.1172
0.1324
0.1184
0.2493
0.1403
0.1269
0.1254
0.1589
0.1228
0.1548
0.1620
0.1270
0.1219
0.1219
0.1449
0.1133
0.1800
0.1959
0.1330
0.1314
0.1067
0.1207
0.1086
0.1430
0.1113
0.1375
0.1652
0.1293
0.1261
0.1513
0.1269
0.1334
0.1213
0.1117
0.1366
0.1399
0.1310
0.1452
0.1476
0.1330
0.1173
0.1508
0.1389
0.1231
0.1381
0.1656
0.1119
0.1386
0.1126
0.1373
0.1122
0.1230
0.0956
0.1402
0.1565
0.1408
0.1206
0.1396
0.1236
0.1630
0.1353
0.1303
0.2084
0.1242
0.1122
0.1314
0.1330
0.1390
0.1695
0.1342
0.1609
0.1675
0.1445
0.1422
0.1566
0.1629
0.1685
0.1337
0.1291
0.1315
0.1648
0.1187
0.1312
0.1344
0.1582
0.1085
0.1148
0.1341
0.1757
0.1398
0.1169
0.1368
0.1972
0.1132
0.1293
0.1359
0.1159
0.1204
0.1141
0.1512
0.1578
0.1483
0.1096
0.1363
0.1530
0.1410
0.1818
0.1212
0.1304
0.1415
0.1201
0.1561
0.1149
0.1250
0.1123
0.1161
0.1466
0.1419
0.1714
[torch.FloatTensor of size 512]
), ('layer4.1.conv2.weight',
( 0 , 0 ,.,.) =
2.8729e-04 4.2632e-03 -2.0266e-03
1.9513e-04 2.4381e-03 -5.8632e-03
4.4803e-03 8.6577e-03 8.5538e-04
( 0 , 1 ,.,.) =
-1.1335e-02 -1.3195e-02 -1.0305e-02
-4.9507e-03 -4.5898e-03 -3.1041e-03
-7.5883e-03 -8.3795e-03 -8.9239e-03
( 0 , 2 ,.,.) =
-1.1914e-02 -1.2104e-02 -1.0167e-02
-1.2093e-02 -1.1557e-02 -8.9600e-03
-1.2515e-02 -9.3296e-03 -6.4079e-03
...
( 0 ,509,.,.) =
-9.3573e-03 -1.0662e-02 -1.2672e-02
-8.0600e-03 -8.5423e-03 -1.2121e-02
-8.1498e-03 -8.8037e-03 -1.0611e-02
( 0 ,510,.,.) =
4.2632e-03 5.6461e-03 2.8460e-03
4.7070e-03 6.2550e-03 7.5862e-03
1.1504e-02 1.1518e-02 1.0728e-02
( 0 ,511,.,.) =
-6.2455e-03 -9.1693e-03 -9.6664e-03
-4.2935e-03 -6.5311e-03 -5.0513e-03
-3.1141e-03 -5.0124e-03 -5.8122e-03
⋮
( 1 , 0 ,.,.) =
2.7483e-03 3.7146e-04 3.3262e-05
-4.5675e-03 -6.6689e-03 -6.4447e-03
-6.7610e-03 -7.3204e-03 -9.5855e-03
( 1 , 1 ,.,.) =
-1.4630e-02 -1.2320e-02 -1.4457e-02
-8.6197e-03 -5.8059e-03 -1.1075e-02
-6.2154e-03 -6.8218e-03 -9.3805e-03
( 1 , 2 ,.,.) =
1.0879e-03 4.3850e-04 -1.9456e-03
-1.2517e-03 3.2917e-04 -2.1435e-03
4.8136e-03 2.5333e-03 5.1504e-03
...
( 1 ,509,.,.) =
2.4644e-02 1.7434e-02 2.0734e-02
2.3101e-02 1.3487e-02 2.0728e-02
1.9381e-02 1.5243e-02 1.7340e-02
( 1 ,510,.,.) =
1.2212e-02 1.2448e-02 1.5048e-02
5.2993e-03 4.0090e-03 9.3927e-03
6.6766e-03 2.4941e-03 8.3288e-03
( 1 ,511,.,.) =
3.1040e-02 2.8243e-02 3.2319e-02
3.8608e-02 3.3099e-02 3.8652e-02
2.5839e-02 2.6524e-02 2.4995e-02
⋮
( 2 , 0 ,.,.) =
-2.1761e-03 4.5553e-03 2.0612e-03
4.9747e-03 1.1420e-02 8.5734e-03
4.8583e-03 1.1469e-02 1.0039e-02
( 2 , 1 ,.,.) =
-6.2547e-05 6.5336e-04 9.4747e-04
5.0603e-03 7.7136e-03 6.5484e-03
-4.8432e-04 2.3057e-03 2.9219e-03
( 2 , 2 ,.,.) =
-3.2788e-02 -2.7615e-02 -3.2608e-02
-3.6296e-02 -2.8170e-02 -3.0277e-02
-3.6814e-02 -3.1547e-02 -3.0231e-02
...
( 2 ,509,.,.) =
-5.2998e-03 -2.8590e-04 -4.9266e-03
-7.0530e-03 -2.3684e-04 -1.5838e-03
-6.9291e-03 4.8084e-04 -3.1548e-03
( 2 ,510,.,.) =
1.1854e-02 8.4836e-03 1.3839e-02
2.8741e-03 -9.7358e-05 4.4888e-03
-2.5515e-03 -2.7788e-03 -3.2464e-03
( 2 ,511,.,.) =
-1.2408e-02 -1.5001e-02 -1.3377e-02
-1.4540e-02 -1.8537e-02 -1.7392e-02
-6.7315e-03 -9.5205e-03 -9.0692e-03
...
⋮
(509, 0 ,.,.) =
3.0369e-03 1.9542e-03 1.7140e-03
-7.6240e-03 -2.8765e-03 -5.1760e-03
-9.3019e-03 -4.8800e-03 -4.2932e-03
(509, 1 ,.,.) =
4.4836e-03 2.4909e-03 1.5746e-03
1.2065e-02 1.2936e-02 1.0344e-02
1.9010e-02 1.7459e-02 1.5988e-02
(509, 2 ,.,.) =
-1.4914e-03 -8.1727e-03 -8.0671e-03
-6.6247e-03 -6.2421e-03 -9.2717e-03
-8.7991e-03 -7.7528e-03 -8.6336e-03
...
(509,509,.,.) =
-1.8040e-02 -1.5366e-02 -1.5334e-02
-1.3148e-02 -1.2180e-02 -1.0915e-02
-1.4545e-02 -1.4756e-02 -1.1787e-02
(509,510,.,.) =
3.5762e-03 6.6073e-03 -1.4055e-03
4.3975e-03 7.8375e-03 8.8085e-05
-5.0697e-03 -5.6633e-04 -5.9284e-03
(509,511,.,.) =
-1.9234e-03 -8.8012e-03 -5.8821e-03
3.6685e-03 -1.3784e-03 -3.2117e-03
-4.7037e-04 1.5340e-04 -3.4046e-03
⋮
(510, 0 ,.,.) =
-1.8305e-02 -1.7735e-02 -2.1683e-02
-1.6598e-02 -1.2508e-02 -2.0530e-02
-1.0800e-02 -9.8670e-03 -1.7195e-02
(510, 1 ,.,.) =
2.0721e-02 2.2466e-02 2.5049e-02
1.8682e-02 1.3160e-02 2.3696e-02
2.2104e-02 1.7261e-02 2.4877e-02
(510, 2 ,.,.) =
-5.7091e-03 -2.6876e-03 -9.2260e-04
-9.4530e-03 -7.0543e-03 -6.2770e-03
-4.5806e-03 -2.7182e-03 -2.5823e-03
...
(510,509,.,.) =
2.4150e-02 1.4002e-02 1.6559e-02
2.1363e-02 1.4359e-02 1.5854e-02
2.5786e-02 2.7233e-02 2.5104e-02
(510,510,.,.) =
-4.6450e-03 1.2419e-03 -1.8768e-03
1.3005e-03 4.0888e-03 -6.5483e-04
-7.9783e-03 -6.6539e-03 -8.9957e-03
(510,511,.,.) =
1.1494e-02 2.6621e-02 1.5649e-02
6.5960e-03 1.7290e-02 7.5466e-03
-8.0256e-03 4.6246e-03 -5.7808e-03
⋮
(511, 0 ,.,.) =
1.4232e-02 1.1769e-02 9.4342e-03
6.2592e-03 5.1087e-03 2.3311e-03
-1.9694e-03 2.7110e-03 -2.8945e-03
(511, 1 ,.,.) =
-7.0772e-03 1.0365e-03 -5.8451e-03
-9.1879e-03 -3.1388e-03 -8.1517e-03
-8.0300e-03 -5.1313e-03 -9.5734e-03
(511, 2 ,.,.) =
2.4314e-02 1.8942e-02 2.4256e-02
2.0090e-02 1.1472e-02 1.5993e-02
2.2910e-02 2.0622e-02 2.3820e-02
...
(511,509,.,.) =
-1.6375e-02 -1.6928e-02 -1.9019e-02
-9.7367e-03 -1.1274e-02 -1.0261e-02
-1.2310e-02 -1.5931e-02 -1.4151e-02
(511,510,.,.) =
4.7098e-03 -4.5205e-04 2.8042e-03
2.1428e-03 -4.6175e-03 -1.6818e-03
-1.3336e-03 -5.5009e-03 -2.6237e-03
(511,511,.,.) =
-1.4367e-02 -1.3520e-02 -1.1387e-02
-4.7420e-03 -1.7309e-03 -2.6426e-03
5.1448e-03 7.0428e-03 5.0202e-03
[torch.FloatTensor of size 512x512x3x3]
), ('layer4.1.bn2.weight',
1.8419
1.8307
1.7650
1.8288
1.9505
1.8026
1.9536
2.2790
1.7662
1.8902
1.7768
1.7749
1.9055
1.7328
1.8762
1.8211
1.7967
2.3428
1.7985
1.7271
1.7915
1.9512
1.8928
1.9017
1.8784
1.9809
1.8569
1.7830
1.8911
1.8859
1.7764
1.9832
1.8389
1.7616
1.8728
1.8753
1.9008
1.8209
1.7039
1.7377
1.7786
1.6944
1.7829
1.7815
1.7594
1.8428
1.9238
2.0871
1.8980
1.8413
1.8471
1.8584
1.7640
1.8453
1.7606
1.9504
1.9620
1.8755
1.9424
1.8731
1.8674
1.9422
1.8750
1.9208
1.7464
1.8558
1.6539
2.0660
2.0298
1.9174
1.8972
1.7589
1.7551
1.9560
1.7909
1.7971
1.7851
1.7733
1.8061
1.7949
1.8169
1.8089
1.8641
2.1542
1.7739
1.7913
1.8022
1.7155
1.7679
1.7704
1.6266
1.8645
1.9076
1.8576
1.6924
1.8020
1.7100
1.7713
1.8572
1.7103
2.0664
1.9054
1.9422
1.8078
1.7412
1.6061
1.9105
1.8947
1.7954
1.8989
1.8239
1.7619
1.7951
1.8149
1.8539
1.8502
1.7095
2.1831
1.8599
1.8252
1.8193
1.8460
1.7968
1.6229
1.8450
1.8290
1.8706
1.9293
1.6881
1.9725
1.8981
1.8925
1.8851
1.8445
1.9764
2.0674
1.8384
1.8414
1.8762
1.7931
1.7131
1.9644
1.7854
1.9369
1.8972
1.8940
1.8700
1.7967
1.8775
1.9409
1.7391
1.7944
1.9678
1.7678
1.6851
1.9414
1.9663
1.9882
1.7915
1.8141
1.8325
2.1200
1.9256
2.3592
2.0304
1.9594
1.7334
1.9048
1.8221
1.7811
1.9084
1.8053
1.9171
1.9644
1.8256
1.6432
1.9173
1.9094
1.9923
1.7963
1.9077
1.7619
2.1724
1.7931
1.7564
1.8889
1.9832
1.9136
1.8035
1.8419
1.8278
1.8057
1.9063
1.8646
1.7848
1.8230
1.7986
1.7091
1.7724
1.7939
1.7611
1.9325
2.0162
1.7295
2.0196
1.8876
1.8325
1.8225
1.7870
1.9160
1.7197
1.7170
1.9133
1.7770
1.9943
1.8389
1.8070
1.8516
1.7857
1.9648
1.9553
1.9232
1.8086
1.8114
1.7141
1.8058
1.8532
1.9255
1.7682
1.8314
1.8495
1.8296
1.8278
1.8819
1.7698
1.7838
1.7807
1.9974
1.6994
1.9483
1.7793
1.8029
2.2210
1.6455
1.8357
2.1706
1.9204
1.7414
1.7809
1.8648
1.9145
1.8849
1.8346
1.9368
1.8169
2.2302
1.8262
2.0651
1.9888
1.8169
1.8462
1.9681
1.8083
1.8595
1.8539
1.7699
1.9001
1.7285
1.7553
1.8924
1.7829
1.9428
1.8724
1.7228
2.0548
1.7732
1.8561
1.7699
1.9269
1.8171
2.4075
1.7257
1.7819
1.7244
1.8521
1.8302
1.8797
1.7617
1.9650
1.9807
1.7102
1.7486
1.8350
1.9919
1.8505
1.9000
1.8269
1.9787
1.7635
1.6071
1.7998
1.9545
1.7348
1.7140
1.8851
1.7981
1.9100
1.8315
1.7864
1.9165
1.8839
1.9017
1.9334
1.7405
1.7661
1.8015
1.9987
1.7622
1.9107
1.8444
1.7128
1.8726
1.8529
1.9270
1.8769
1.7261
1.8393
1.9075
1.7953
1.8246
1.7605
2.0470
1.9221
1.9205
1.8910
1.7666
1.6801
1.8308
1.8845
1.8339
1.8238
1.7616
1.6114
1.8411
1.7437
1.8423
1.9540
1.7465
1.7741
1.8746
1.8856
1.7740
1.7603
1.7682
1.8396
1.6869
1.8080
1.8836
1.8283
1.8341
1.8522
1.9749
1.8707
1.7719
1.8993
1.8108
1.8480
1.8267
1.8731
1.9576
1.8347
1.9509
1.9641
1.7997
1.7652
1.9253
1.7126
1.7551
1.9427
1.8559
1.9163
1.7681
1.7803
1.8500
1.8535
1.8865
1.7599
2.0692
1.8021
1.7077
1.8890
1.9457
1.8516
1.7882
1.8356
1.8472
1.6708
1.7435
1.9080
1.9653
2.0401
1.8935
1.8450
1.7536
1.7733
1.8135
1.8534
1.9368
1.7348
1.8738
1.9632
1.9033
1.7422
1.7842
1.8516
2.0218
1.7044
1.8793
1.8655
1.8516
1.8002
1.8687
1.8460
1.7589
1.8174
1.9830
1.9034
2.1222
1.8460
1.9209
1.8893
1.9422
1.8489
1.8396
1.9953
2.0865
1.8253
1.7700
1.8035
1.7535
1.8923
1.8620
1.8627
1.7264
1.8140
1.9613
1.8812
1.8729
2.0050
1.7092
1.7726
1.9410
1.8381
1.8366
1.7276
1.8796
1.7548
1.9536
1.8062
1.8883
2.0278
1.8775
1.9446
1.8676
1.8423
1.7798
1.9403
1.8375
2.0473
1.9507
1.8337
1.8184
1.7791
1.8993
1.8781
1.8691
1.8493
1.7623
1.9458
1.7564
1.7448
1.8633
1.6863
1.8062
1.8702
2.0048
1.8504
1.8964
1.9489
1.8264
1.9019
1.8196
1.9712
1.8969
1.8652
1.8709
1.6984
1.8677
1.8846
1.9256
1.8620
1.6366
1.8434
1.7506
1.8438
1.5788
1.9316
1.9535
1.7878
1.7354
2.0920
1.9456
[torch.FloatTensor of size 512]
), ('layer4.1.bn2.bias',
0.2371
0.3433
0.3279
0.4642
0.2233
0.2370
0.2176
0.3793
0.3140
0.2803
0.2434
0.2116
0.2478
0.2435
0.2298
0.3172
0.2725
0.6511
0.2925
0.2281
0.2279
0.4254
0.2342
0.3328
0.2632
0.2176
0.3180
0.3893
0.1387
0.2274
0.3379
0.0767
0.2253
0.2504
0.1990
0.1951
0.2566
0.3253
0.2797
0.3149
0.2373
0.2533
0.1956
0.3236
0.2093
0.2333
0.2300
0.5019
0.2830
0.1885
0.3264
0.2722
0.2369
0.2430
0.3625
0.2165
0.4700
0.3047
0.3675
0.2641
0.1979
0.2664
0.3448
0.2005
0.2450
0.4351
0.2689
0.1632
0.3087
0.1209
0.2153
0.1592
0.2960
0.1423
0.2951
0.2706
0.2007
0.2939
0.2210
0.2243
0.2465
0.3910
0.4599
0.5417
0.2147
0.3469
0.2703
0.2229
0.3645
0.2647
0.2421
0.2492
0.1666
0.2763
0.2560
0.2151
0.3363
0.2767
0.2516
0.2988
0.2622
0.3499
0.3001
0.3907
0.3184
0.2233
0.2649
0.2110
0.2034
0.2752
0.2314
0.3480
0.2238
0.2892
0.1991
0.2923
0.3259
0.0722
0.3039
0.3041
0.3803
0.2568
0.2382
0.3057
0.2652
0.1532
0.2110
0.2567
0.3148
0.2746
0.1833
0.1950
0.1116
0.2279
0.3705
0.2477
0.2000
0.3060
0.2548
0.2468
0.3028
0.1921
0.2952
0.1980
0.2135
0.1583
0.1586
0.3944
0.2352
0.3947
0.2740
0.2861
0.1856
0.2702
0.2986
0.1728
0.2658
0.2696
0.2028
0.1838
0.3176
0.6246
0.2631
0.3855
0.2074
0.2317
0.4171
0.2044
0.2926
0.3506
0.2305
0.2400
0.1420
0.1093
0.2757
0.3253
0.2334
0.1650
0.4026
0.2066
0.1790
0.3032
0.5658
0.3246
0.3834
0.3254
0.1772
0.2909
0.2350
0.2519
0.1968
0.2003
0.3213
0.4802
0.2543
0.2578
0.3280
0.2270
0.3044
0.2273
0.2447
0.2527
0.4136
0.2588
0.3589
0.2688
0.2115
0.2022
0.3186
0.3740
0.1785
0.2074
0.2346
0.3566
0.2623
0.2620
0.2880
0.1462
0.1896
0.2777
0.1852
0.3240
0.2748
0.2164
0.3066
0.1845
0.3992
0.1695
0.4411
0.2812
0.2730
0.2784
0.1861
0.3589
0.1934
0.3320
0.3350
0.2655
0.2740
0.3185
0.2633
0.2458
0.2003
0.2809
0.3049
0.2050
0.2904
0.2381
0.3278
0.3484
0.4293
0.2422
0.2859
0.1864
0.2954
0.5634
0.2081
0.3743
0.2902
0.3820
0.3069
0.2101
0.2750
0.2878
0.1870
0.3015
0.1661
0.2998
0.3101
0.2522
0.2419
0.1758
0.2681
0.2812
0.1495
0.2868
0.3157
0.2587
0.2437
0.1467
0.5416
0.2490
0.2831
0.2783
0.1614
0.1963
0.2034
0.2364
0.2527
0.1573
0.3184
0.2841
0.1613
0.1489
0.2850
0.1625
0.3277
0.4936
0.2780
0.3178
0.1743
0.2158
0.2222
0.2821
0.4267
0.2713
0.1778
0.3067
0.2270
0.1772
0.3897
0.2923
0.4843
0.2345
0.2327
0.2740
0.2700
0.2804
0.4035
0.1501
0.3329
0.3286
0.2803
0.2309
0.1738
0.3270
0.3097
0.1808
0.2384
0.2107
0.3240
0.3346
0.2236
0.2061
0.2687
0.2360
0.3338
0.2694
0.3203
0.2895
0.1884
0.1491
0.3957
0.5167
0.3407
0.1854
0.1816
0.2626
0.1855
0.2219
0.1482
0.2584
0.2458
0.2616
0.2396
0.2402
0.2423
0.3463
0.2731
0.1524
0.2514
0.2760
0.1734
0.2715
0.4052
0.2252
0.3676
0.3070
0.3127
0.1836
0.4330
0.2203
0.2073
0.2803
0.2984
0.2191
0.3272
0.2267
0.2749
0.3056
0.4566
0.2962
0.3528
0.3236
0.4220
0.2715
0.2256
0.2903
0.1829
0.3994
0.2820
0.2471
0.1647
0.3654
0.4504
0.2685
0.2992
0.2825
0.2435
0.2212
0.4300
0.4342
0.1988
0.2863
0.3398
0.2444
0.2905
0.2559
0.2586
0.1702
0.1906
0.2536
0.2978
0.2498
0.3777
0.2252
0.2472
0.2243
0.1732
0.2194
0.2091
0.2820
0.2898
0.2887
0.3292
0.1644
0.2962
0.3279
0.2535
0.2795
0.2238
0.2607
0.1937
0.2680
0.2418
0.5193
0.2502
0.3147
0.2166
0.2313
0.2027
0.1880
0.2180
0.3826
0.3871
0.2358
0.3556
0.2272
0.3272
0.3442
0.3154
0.1993
0.3135
0.2254
0.3048
0.2658
0.3337
0.2679
0.2670
0.2363
0.4347
0.1931
0.1995
0.2072
0.3202
0.2667
0.2305
0.2383
0.2246
0.2562
0.2837
0.4046
0.2786
0.2243
0.1591
0.1923
0.1894
0.2496
0.1140
0.3128
0.3197
0.3530
0.2999
0.2115
0.4718
0.2979
0.3472
0.2890
0.4740
0.2230
0.3630
0.4015
0.2446
0.1897
0.1460
0.1874
0.2734
0.2366
0.3001
0.2359
0.2688
0.3256
0.2749
0.2848
0.2299
0.3001
0.4818
0.3074
0.3164
0.3114
0.3549
0.2859
[torch.FloatTensor of size 512]
), ('layer4.1.bn2.running_mean',
1.00000e-02 *
-1.3953
-5.0031
-4.3323
-1.5914
-4.6112
-2.3473
-2.5429
-3.3783
5.1665
-3.5325
-3.7555
-1.3353
-3.4012
-0.2871
-4.3814
-2.6424
-3.0987
-9.1183
1.5193
-3.9889
0.3516
-6.3124
-2.8069
-3.8168
-0.5747
-1.9639
-2.0180
-4.1006
-3.7068
-4.9702
-2.0847
-3.5155
-2.2799
-3.5089
-2.5835
-2.6871
-3.3089
-5.3280
-2.9554
-2.7207
-7.9410
-3.8662
-7.0901
-0.4792
-4.0081
-4.4518
-0.9688
3.9220
-4.1579
-3.5060
-2.7755
-0.7651
-4.4367
-2.6813
-1.2360
-3.5112
-3.1672
-2.4467
-6.0395
-4.6648
-5.3290
-2.8216
-5.4557
-5.4704
-2.8591
-5.0634
0.0627
-5.1950
-5.1578
-3.6758
-3.0772
-3.9569
-1.9722
-4.3900
-4.4507
-5.3416
-4.3945
-2.3374
-5.1497
-4.4268
-3.9613
0.7135
-3.1644
3.4458
-3.8945
-3.8628
-2.9412
-3.6697
-3.3454
-0.3520
-2.3919
-1.5737
-1.8832
-3.6160
-3.0676
-3.5423
-3.6338
-6.0085
-2.4744
-1.0668
-6.4177
-4.9577
-5.2484
-5.4054
-0.5603
-1.1281
-5.2175
-4.2486
-2.8311
-3.6422
-0.5653
-2.8168
-2.9531
-3.7204
-6.7556
-3.1953
-4.5689
4.5595
-3.0731
-2.3350
-3.2033
-1.5946
-4.3791
0.4781
0.0364
-3.1503
-2.3126
-3.7362
-4.6890
-2.7394
-5.6134
-4.9824
-4.3524
-2.7824
-7.0928
-6.0755
-5.0579
-2.8704
-2.6274
-3.3160
-0.8618
-3.9907
-3.5256
-8.6042
-2.6312
-3.2020
-1.4972
-1.1586
-3.8802
-6.7321
-4.5730
-1.3368
-2.9202
-1.5672
-5.5057
-4.7705
-2.6542
-1.3914
-1.9433
-3.6511
-5.1134
-4.3920
-3.3364
4.1027
-3.2706
-0.0820
-1.9290
-4.7500
-4.4132
-3.8169
-2.4048
-4.1317
-2.0381
-3.9825
-4.3505
-1.3664
-3.0153
-3.2162
-5.1351
-4.3963
-0.1310
-2.0620
-4.1151
-7.4645
-1.3569
-4.2029
-3.6559
-5.5496
-2.4927
-4.3322
-2.5386
-0.5925
0.9121
-4.6321
-4.9662
-1.3392
-4.9198
-2.2978
-1.3565
-4.4778
-4.1518
-5.3186
-6.2418
-3.3953
-1.8224
-4.7834
-2.5541
-1.6724
-6.6561
-5.5189
-2.9102
0.1744
-2.8708
-4.7356
-3.9403
-5.2492
-4.8850
-3.3341
-2.7483
-6.1212
-4.5193
-3.5821
0.8330
-1.7342
0.1103
-6.2373
-2.4603
-7.2638
2.1412
-7.5782
-3.2325
-2.4850
-2.2635
-0.8499
-2.4250
0.6696
-1.6815
-5.7800
-4.0070
-2.5381
-2.6095
-3.6127
-4.1404
-4.7404
-0.1727
-5.8207
-6.2922
-4.1185
-2.7714
-2.6942
-1.9350
-0.2645
-4.6726
-4.4284
-2.1652
-4.8506
-4.0399
-3.5572
-4.6054
-3.1532
-3.2670
-4.3606
-5.3407
-2.9613
-6.5983
-3.6677
-1.5673
-4.1916
0.7200
-2.6574
-2.9427
-4.6752
-1.6942
-1.7730
-3.1830
-2.0861
-4.2271
-3.7406
-3.8363
-4.3299
0.7099
-1.6024
-1.1558
-1.3649
-1.9286
-0.5381
-3.8080
-4.0525
-3.7919
-3.8805
-7.5134
-1.3963
0.2917
-1.8857
-1.8787
-1.8889
-3.9999
-4.7723
-1.5847
-3.8556
-0.3824
-4.1886
-2.2822
-2.7051
-1.2578
-1.2243
-1.0389
-3.0908
-4.9441
-5.7127
-4.3721
-2.9496
-2.8846
-3.4347
-3.3969
-1.8485
-2.0259
-0.6510
-3.2701
-1.2320
-4.2393
-4.4799
-2.5397
-3.7255
-3.0126
-3.3043
1.1193
-3.4983
-3.1577
-3.6480
-1.0290
2.8495
-3.6363
-0.4754
-5.7458
-1.7357
-0.9672
-2.6240
-3.2853
-0.1027
-5.6348
-1.9716
-6.3584
1.1124
-2.9937
-2.5287
-3.9320
0.2933
-4.8010
-3.1270
-3.5449
-2.9562
-1.7369
-5.0306
-4.5947
-1.7834
-0.6932
-0.3274
-4.8955
-3.5086
-2.4075
-2.1984
-0.7037
-3.7546
-3.1797
-2.4134
-3.3352
-3.2565
-1.6909
-2.2290
-3.9201
-1.0906
-5.6042
-4.8766
-3.0840
-3.4916
-3.8825
-4.3324
-5.6847
1.0243
-4.6514
-1.0452
-0.2154
-4.4856
-3.6066
-1.7105
-0.5236
-2.8570
-5.0284
-6.5926
-1.6846
-3.1785
-6.2677
-5.5734
-3.5885
-1.4798
-4.3455
-1.3114
-3.5012
-2.1125
-7.3286
-4.1934
-1.7432
-3.5229
3.5735
-3.0858
-4.1892
-3.5874
0.1710
-1.8882
-3.2570
-6.8433
-0.2356
-4.5632
0.1103
-7.9181
-1.5563
-2.2546
-1.9013
-4.7557
-1.5476
-4.5174
-2.5230
-3.1111
-1.7632
-1.1193
-1.6986
-6.3783
-2.5520
0.9365
-2.4927
-5.2760
-6.2665
-2.4147
-5.4109
-6.4714
-1.9359
-1.4110
-4.5960
-4.2290
-2.9651
-1.1331
-4.9568
-4.5198
-4.6655
-3.9152
-7.2373
-2.8233
-3.3341
0.0050
-2.4896
-0.4391
-5.7027
-1.6781
-4.1684
-4.3151
-1.1696
-3.2351
-0.9796
-1.0248
-3.1722
-3.1369
-4.3368
-4.1376
-1.7700
-6.5839
-4.4930
-3.0312
-4.9151
-3.4421
-2.9603
-2.7210
-2.1330
-3.6309
-2.3335
-4.0678
-1.2841
-3.0524
-6.1549
-4.6466
-6.2686
-3.6889
-3.9056
-3.4740
-0.6074
-1.9422
-4.2960
-3.7847
-7.7137
-2.9199
-5.4336
-2.5864
-3.2088
-5.0267
-2.6562
-1.9347
-1.2865
-3.7129
-3.3561
-5.6942
-2.3849
-0.7705
-5.8456
0.0136
-6.6229
-2.9168
-0.3950
-5.2685
-1.9541
-5.8807
-4.5790
-3.0423
[torch.FloatTensor of size 512]
), ('layer4.1.bn2.running_var',
1.00000e-02 *
1.2607
1.2795
1.2836
1.3783
1.2441
1.3147
1.4444
1.6157
1.2308
1.1641
1.1995
1.1215
1.2360
1.0052
1.2017
1.3942
1.3127
1.8412
1.2599
1.0842
1.2449
1.3412
1.3765
1.3621
1.2877
1.2920
1.2716
1.2023
1.4954
1.2628
1.2342
1.2743
1.2353
1.1137
1.2544
1.2324
1.4762
1.2765
1.1890
1.0633
1.2721
1.0859
1.2173
1.2396
1.3669
1.2910
1.2683
2.0786
1.2164
1.1582
1.2351
1.3041
1.0677
1.5028
1.1270
1.2414
1.5545
1.4291
1.3015
1.1886
1.1764
1.7043
1.1810
1.4155
1.1373
1.2751
1.0263
1.3853
1.5373
1.2052
1.3857
1.1107
1.1257
1.3424
1.1654
1.3275
1.2267
1.1632
1.1624
1.1821
1.1366
1.4051
1.1627
1.5670
1.0272
1.2229
1.3182
1.1980
1.1770
1.1470
0.9519
1.3395
1.2046
1.3805
1.1765
1.2712
1.1828
1.1465
1.2209
1.1313
1.2856
1.2779
1.2912
1.2170
1.1401
1.1912
1.4428
1.4182
1.3258
1.5467
1.1182
1.1008
1.1993
1.3008
1.3681
1.3370
1.1496
1.6639
1.1855
1.2463
1.2111
1.3034
1.2276
0.9981
1.2321
1.1815
1.2773
1.1727
1.1281
1.3318
1.4112
1.2649
1.0986
1.2151
1.5177
1.3746
1.2133
1.3573
1.2481
1.3561
1.0291
1.2488
1.1282
1.2459
1.3162
1.3991
1.2794
1.5236
1.1475
1.4152
1.1746
1.1560
1.4177
1.1815
1.0985
1.4292
1.3252
1.3664
1.2592
1.1000
1.2848
2.0357
1.2684
3.0875
1.6966
1.3806
1.0805
1.1598
1.2627
1.2273
1.2620
1.1659
1.2492
1.2681
1.2751
1.0844
1.4885
1.3681
1.2171
1.1670
1.3635
1.1769
1.8156
1.1138
1.2628
1.5029
1.5616
1.3909
1.1480
1.1303
1.2139
1.1750
1.5876
1.3656
1.0394
1.2946
1.2624
1.1957
1.3235
1.1265
1.1500
1.2635
1.5489
0.9593
1.4590
1.4663
1.3447
1.2776
1.2686
1.2837
1.0899
1.0744
1.1642
1.1800
1.4411
1.2817
1.1857
1.1787
1.0723
1.4497
1.3106
1.3415
1.3301
1.2676
1.1392
1.2345
1.2399
1.2312
1.3118
1.1824
1.2197
1.2423
1.2812
1.3887
1.1381
1.1887
1.0703
1.5483
1.2125
1.2822
1.1870
1.2034
1.7866
1.0217
1.2893
1.4659
1.2393
1.1389
1.1972
1.2732
1.3072
1.2994
1.2904
1.2871
1.1562
1.4855
1.2064
1.4229
1.2749
1.5885
1.3019
1.2125
1.2583
1.1958
1.3250
1.0867
1.3941
1.0751
1.2574
1.2344
1.0800
1.1533
1.3274
1.1349
1.4498
1.2250
1.2234
0.9903
1.1828
1.2083
1.5951
1.0767
1.1830
1.3225
1.1655
1.1856
1.2551
1.1889
1.2027
1.3007
1.1249
1.2834
1.3066
1.4390
1.3390
1.1616
1.3649
1.3628
1.2689
0.9673
1.3976
1.2583
1.1835
1.2000
1.4709
1.3959
1.2518
1.3496
1.2184
1.4348
1.2852
1.2958
1.3992
1.1663
1.0442
1.1392
1.3530
1.2199
1.3925
1.2103
1.0940
1.2331
1.4481
1.2432
1.1955
1.1361
1.3141
1.3357
1.0638
1.1367
1.1926
1.5863
1.3304
1.2212
1.3405
1.1748
1.0780
1.1570
1.3548
1.3191
1.1238
1.1355
1.1769
1.4076
1.0655
1.1557
1.2413
1.1456
1.1505
1.2523
1.1101
1.1558
1.1428
1.0822
1.1301
1.1807
1.2160
1.2464
1.1496
1.2547
1.4902
1.4602
1.2770
1.2263
1.4406
1.2328
1.1850
1.2651
1.3965
1.4678
1.2244
1.2105
1.2584
1.1940
1.0827
1.3151
1.1509
1.1410
1.3750
1.2897
1.4835
1.2276
1.1962
1.2476
1.3449
1.3318
1.2557
1.6294
1.2615
1.1391
1.0025
1.3623
1.2699
1.1068
1.3502
1.2616
1.1090
1.2450
1.3262
1.2724
1.6779
1.3447
1.1733
1.2772
1.3858
1.2996
1.3405
1.2329
1.2411
1.2495
1.3410
1.1526
1.1101
1.1719
1.2455
1.3591
1.0963
1.2343
1.1039
1.2518
1.1693
1.1699
1.3526
1.2257
1.2567
1.4976
1.2528
1.3554
1.1318
1.2526
1.2288
1.3581
1.1964
1.2445
1.4404
1.6388
1.2757
1.2317
1.1435
1.1726
1.3039
1.2119
1.3858
1.1201
1.2956
1.2951
1.2869
1.2629
1.6022
1.1351
1.1411
1.4286
1.2237
1.2991
1.2031
1.1916
1.0642
1.3661
1.3933
1.2715
1.4832
1.1984
1.2630
1.1473
1.3745
1.1393
1.1939
1.3160
1.3901
1.2581
1.1313
1.0755
1.1584
1.2514
1.2153
1.3047
1.1249
1.1903
1.2367
1.1338
1.2559
1.1869
1.2105
1.0223
1.5068
1.3862
1.0991
1.2486
1.2651
1.0860
1.3252
1.5014
1.2576
1.2565
1.2202
1.4279
1.0337
1.3899
1.3158
1.2282
1.4694
1.0891
1.4762
1.0859
1.0720
1.1243
1.5002
1.2772
1.1317
1.2571
1.6188
1.3516
[torch.FloatTensor of size 512]
), ('fc.weight',
-1.8474e-02 -7.0461e-02 -5.1772e-02 ... -3.9030e-02 1.7351e-01 -4.0976e-02
-8.1792e-02 -9.4370e-02 1.7355e-02 ... 2.0284e-01 -2.4782e-02 3.7172e-02
-3.3164e-02 -5.6569e-02 -2.4165e-02 ... -3.4402e-02 -2.2659e-02 1.9705e-02
... ⋱ ...
-1.0300e-02 3.2804e-03 -3.5863e-02 ... -2.7923e-02 -1.1458e-02 1.2759e-02
-3.5879e-02 -3.5296e-02 -2.9602e-02 ... -3.2961e-02 -1.1022e-02 -5.1256e-02
2.1277e-03 -2.4839e-02 -8.2920e-02 ... 4.1731e-02 -5.0030e-02 6.6327e-02
[torch.FloatTensor of size 1000x512]
), ('fc.bias',
1.00000e-02 *
-0.2634
0.3000
0.0656
⋮
-1.7868
-0.0782
-0.6345
[torch.FloatTensor of size 1000]
)])
In [ ]:
"""
Test image folder training datasets
https://discuss.pytorch.org/t/questions-about-dataloader-and-dataset/806
https://github.com/pytorch/examples/blob/master/imagenet/main.py
"""
In [ ]:
"""
Test: load partial state dict
"""
model = G_model(BasicBlock, [2, 2, 2, 2])
pretrained_dict = pretrained_resnet18.state_dict()
model_dict = model.state_dict()
# 1. filter out unnecessary keys
pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
# 2. overwrite entries in the existing state dict
model_dict.update(pretrained_dict)
# print model_dict
# 3. load the new state dict
model.load_state_dict(model_dict)
# print model.state_dict()
# x = model.layer1[0].relu
# mynet = myNet(model)
# print mynet.conv1.weight
# print mynet.conv1.bias
In [125]:
loaded = torch.load('/Users/albertxavier/.torch/models/resnet18-5c106cde.pth')
del loaded[u'conv1.weight']
print loaded
mynet.load_state_dict(loaded)
print "after..............."
print mynet.conv1.weight
# print mynet.conv1.bias
OrderedDict([(u'bn1.running_mean',
2.7681e-03
-2.5769e-02
2.1254e-07
-8.4605e-02
2.1121e-08
4.9691e-04
-2.2408e-02
-1.1582e-07
-4.8239e-03
2.7507e-07
3.9582e-02
3.1994e-02
-3.7490e-02
-1.3716e-06
6.6002e-03
4.3782e-03
6.4797e-02
1.1176e-01
3.6002e-02
-7.5075e-02
-3.8240e-02
8.4358e-02
-5.2287e-02
-1.1799e-02
1.3019e-03
3.2172e-02
-1.7784e-02
-9.1009e-02
1.1319e-01
-4.1632e-02
8.7302e-03
2.9693e-02
-7.0502e-02
-3.4847e-03
1.0977e-01
-1.7341e-03
-5.9423e-08
2.9330e-02
-7.8553e-09
6.7320e-03
-3.7100e-03
1.6028e-02
-2.7883e-02
2.6593e-02
2.8475e-02
-1.2735e-01
4.4617e-02
2.6329e-02
2.1454e-08
-1.7045e-02
-3.5617e-03
-4.5841e-02
6.3876e-02
1.5220e-02
-3.8511e-02
-1.6428e-02
-1.6569e-02
5.6057e-02
-8.0306e-02
-2.6646e-03
-4.1718e-02
1.2611e-01
-4.9237e-02
-1.3261e-02
[torch.FloatTensor of size 64]
), (u'bn1.running_var',
1.0169e+00
3.7167e+00
5.8133e-11
3.2825e+00
1.7107e-13
6.5823e-01
4.3701e+00
6.6005e-12
9.1552e-01
1.9318e-09
4.1256e+00
2.7440e+00
2.8391e+00
4.7966e-08
1.1072e+01
5.0075e-01
2.2313e+00
4.8257e+00
2.6986e+00
9.3700e+00
3.7339e+00
5.4843e+00
5.7127e+00
4.4544e-01
4.3628e-01
7.1563e+00
1.3718e+01
5.2512e+00
6.8174e+00
1.6724e+00
1.6534e+00
1.2325e+00
4.9076e+00
3.0731e+00
4.2384e+00
4.9936e+00
1.4465e-12
1.5212e+00
1.0352e-13
3.5134e-01
1.7025e-01
1.4205e+00
1.9085e+00
2.1512e+00
2.6608e+00
4.8444e+00
1.9297e+00
1.4999e+00
2.9481e-13
1.5306e+00
3.6503e-01
2.9376e+00
5.4664e+00
7.0792e-01
3.3315e+00
7.7180e-01
2.4068e+00
6.5214e+00
4.1263e+00
1.0506e+00
2.9530e+00
1.1366e+01
4.7690e+00
1.6559e+00
[torch.FloatTensor of size 64]
), (u'bn1.weight', Parameter containing:
2.3487e-01
2.6626e-01
-5.1096e-08
5.1870e-01
3.4404e-09
2.2239e-01
4.2289e-01
1.3153e-07
2.5093e-01
1.5152e-06
3.1687e-01
2.5049e-01
3.7893e-01
1.0862e-05
2.7526e-01
2.3674e-01
2.4202e-01
3.9531e-01
4.6935e-01
2.9090e-01
2.7268e-01
2.7803e-01
2.9069e-01
2.0693e-01
2.5899e-01
2.7871e-01
2.9115e-01
3.1601e-01
3.8889e-01
3.0411e-01
2.6776e-01
2.1093e-01
2.8708e-01
3.3243e-01
4.2673e-01
3.7326e-01
7.4804e-08
1.9068e-01
1.4740e-08
2.2303e-01
1.7908e-01
2.4860e-01
2.7400e-01
2.5923e-01
2.9420e-01
2.9924e-01
2.2369e-01
2.6280e-01
2.2001e-08
2.6610e-01
2.2089e-01
2.8429e-01
3.3072e-01
2.2681e-01
3.6538e-01
2.1230e-01
2.3965e-01
2.4950e-01
5.2583e-01
2.4825e-01
2.9565e-01
2.5878e-01
4.8326e-01
2.6670e-01
[torch.FloatTensor of size 64]
), (u'bn1.bias', Parameter containing:
2.3072e-01
2.5382e-01
-1.0543e-06
-6.6439e-01
-1.6571e-08
1.6152e-01
4.5450e-01
-4.3020e-07
3.0051e-01
-8.0052e-06
3.4942e-01
3.1148e-01
-2.4953e-01
-3.4749e-05
1.0773e-01
2.1897e-01
3.8141e-01
-5.2988e-01
-6.2864e-01
5.7140e-01
2.9985e-01
5.8430e-01
4.8202e-01
3.2853e-01
1.9672e-01
1.9496e-01
1.5215e-01
8.5522e-02
5.1314e-01
1.5237e-02
1.6644e-01
3.3239e-01
2.4921e-01
4.4337e-01
-2.8017e-01
-2.0385e-02
-2.4507e-07
3.2134e-01
-4.9152e-08
2.3777e-01
2.3291e-01
3.1527e-01
4.2776e-01
2.9313e-01
2.6379e-01
6.7598e-01
4.2910e-01
3.4566e-01
-8.6909e-08
2.4729e-01
3.0316e-01
6.1577e-01
3.9835e-01
3.3207e-01
-4.1219e-01
3.7807e-01
1.7895e-01
2.5748e-01
-4.4908e-01
2.1306e-01
5.6934e-01
5.7274e-01
-4.0238e-01
2.3406e-01
[torch.FloatTensor of size 64]
), (u'layer1.0.conv1.weight', Parameter containing:
(0 ,0 ,.,.) =
5.7593e-02 -9.5114e-02 -2.0272e-02
-7.4556e-02 -7.9931e-01 -2.1284e-01
6.5571e-02 -9.6534e-02 -1.2111e-02
(0 ,1 ,.,.) =
-6.9944e-03 1.4266e-02 5.5824e-04
4.1238e-02 -1.6125e-01 -2.3208e-02
3.2887e-03 7.1779e-03 7.1686e-02
(0 ,2 ,.,.) =
-2.3627e-09 -3.9270e-08 -3.2971e-08
2.1737e-08 8.3299e-09 1.2543e-08
1.1382e-08 8.8096e-09 1.5506e-08
...
(0 ,61,.,.) =
-3.6921e-02 1.8294e-02 -2.9358e-02
-9.8615e-02 -4.3645e-02 -5.2717e-02
-7.9635e-02 2.9396e-02 4.1479e-03
(0 ,62,.,.) =
1.6948e-02 1.3978e-02 9.6727e-03
1.4297e-02 -6.6985e-04 -2.2077e-02
1.2398e-02 3.5454e-02 -2.2320e-02
(0 ,63,.,.) =
-2.2600e-02 -2.5331e-02 -2.3548e-02
6.0860e-02 -9.6779e-02 2.4057e-02
-1.2750e-02 9.2237e-02 4.0152e-03
⋮
(1 ,0 ,.,.) =
2.2160e-02 4.2177e-02 -1.6428e-02
-2.9667e-02 5.6865e-02 2.5486e-02
4.3847e-03 5.1188e-02 1.0436e-02
(1 ,1 ,.,.) =
2.5342e-02 5.4374e-02 5.3888e-02
-2.8334e-02 -2.0139e-01 -5.6358e-02
5.6774e-02 7.4188e-02 2.1585e-02
(1 ,2 ,.,.) =
-3.1458e-08 3.5335e-08 5.3791e-08
-2.6896e-08 5.1530e-08 5.4480e-08
-3.8487e-08 -1.1234e-08 -7.5787e-09
...
(1 ,61,.,.) =
-1.2754e-01 4.3552e-02 -6.5607e-02
-6.0462e-02 1.5989e-01 -7.7070e-03
-9.4202e-02 5.0750e-02 -7.8154e-02
(1 ,62,.,.) =
-3.3309e-02 1.6631e-03 -8.8497e-03
1.5553e-02 -5.8277e-02 -2.7437e-02
1.3126e-02 -3.0268e-02 -2.1661e-03
(1 ,63,.,.) =
-4.2313e-03 3.4517e-02 3.8193e-03
5.4317e-02 -1.2457e-02 3.2900e-02
2.2000e-04 1.6040e-02 1.2764e-01
⋮
(2 ,0 ,.,.) =
-3.5247e-02 8.0748e-03 2.0353e-02
1.7344e-02 -2.4320e-02 -1.5511e-04
-2.7634e-04 2.8024e-02 -2.3777e-03
(2 ,1 ,.,.) =
-2.3741e-02 -3.2057e-03 -5.7059e-03
-1.1582e-02 1.7200e-03 2.1067e-02
4.3606e-03 -4.6459e-02 -7.2954e-02
(2 ,2 ,.,.) =
3.1002e-08 5.3568e-08 3.1873e-08
-1.6063e-08 -1.8072e-08 -1.9508e-09
-5.8339e-08 -4.5366e-08 -1.2395e-08
...
(2 ,61,.,.) =
-1.9689e-03 -2.6809e-02 -4.3760e-02
2.4518e-02 -2.8396e-02 -3.5896e-02
-1.7883e-04 -2.4661e-02 -2.0085e-02
(2 ,62,.,.) =
2.1551e-02 2.2789e-03 -2.5823e-02
2.3272e-02 -7.9333e-03 -2.0814e-03
-5.7062e-03 -2.6934e-02 -1.4421e-02
(2 ,63,.,.) =
-1.9674e-02 2.7914e-02 -2.0025e-02
6.3222e-02 -3.9077e-02 -3.3220e-03
-2.7434e-02 1.1390e-02 -3.1608e-03
...
⋮
(61,0 ,.,.) =
4.3440e-03 -7.6970e-03 -6.4950e-02
1.3846e-02 -2.2803e-02 -4.6478e-02
2.7776e-02 1.6080e-02 -1.3363e-02
(61,1 ,.,.) =
4.7379e-02 -2.4982e-02 -2.7605e-02
7.0091e-02 4.2084e-03 -1.0805e-01
1.7526e-02 4.5647e-02 7.8810e-03
(61,2 ,.,.) =
2.6680e-09 2.7671e-08 2.4702e-08
6.3905e-09 4.1020e-08 3.3631e-08
5.8335e-09 1.3334e-08 9.6604e-09
...
(61,61,.,.) =
4.5900e-03 4.7084e-02 -8.6949e-03
-6.3011e-03 5.9585e-02 5.8667e-03
-2.0255e-02 4.3285e-02 4.5094e-03
(61,62,.,.) =
1.1253e-03 -5.7461e-03 -6.8411e-03
6.0616e-03 7.3295e-03 -1.1784e-02
-1.1455e-03 5.1868e-03 -1.9867e-02
(61,63,.,.) =
1.7529e-02 4.4606e-02 -2.6595e-02
2.2102e-02 4.5857e-02 2.3347e-02
1.8052e-02 5.9689e-02 1.7129e-02
⋮
(62,0 ,.,.) =
-2.9112e-02 3.4242e-03 -1.7523e-02
-2.3682e-02 2.2716e-02 -3.8301e-02
-1.0308e-02 -4.3802e-03 -2.3582e-02
(62,1 ,.,.) =
-4.9607e-02 -3.2724e-03 -1.5345e-02
-1.3524e-02 5.4842e-02 1.1187e-02
-2.3549e-02 -2.8495e-02 -6.6371e-02
(62,2 ,.,.) =
-4.9804e-08 -2.8211e-08 -2.0583e-08
-5.2389e-08 -2.8522e-08 -3.5099e-08
-3.2171e-08 -3.4110e-08 -4.3153e-08
...
(62,61,.,.) =
3.4487e-03 2.6532e-02 -1.1202e-02
7.0925e-03 3.7903e-02 -3.2481e-02
4.1381e-02 3.2329e-02 2.8309e-03
(62,62,.,.) =
-6.5955e-03 1.6476e-02 2.1810e-02
-1.2293e-02 2.2310e-02 1.2645e-02
-8.9897e-03 1.1948e-03 -5.2390e-03
(62,63,.,.) =
-2.5295e-03 7.2689e-02 -7.8046e-03
-4.2221e-02 7.9756e-02 -2.7738e-02
4.6716e-03 -5.6596e-02 -8.2261e-02
⋮
(63,0 ,.,.) =
5.2235e-02 3.5231e-03 -3.3131e-02
3.1048e-02 1.6193e-02 1.7283e-02
1.4446e-02 2.4302e-02 -1.9689e-03
(63,1 ,.,.) =
-2.4717e-02 8.3009e-03 -6.1336e-02
-1.6134e-02 5.5323e-02 -6.5029e-02
-2.4715e-02 1.0030e-03 3.2437e-02
(63,2 ,.,.) =
1.8496e-08 5.2798e-09 4.1820e-08
3.7489e-08 2.5450e-08 3.0419e-08
1.1246e-08 -5.6956e-09 -2.0008e-08
...
(63,61,.,.) =
7.1194e-03 -4.1052e-02 -1.0002e-02
2.5924e-02 -6.3819e-02 1.3366e-02
2.9751e-02 -7.9476e-03 1.4007e-02
(63,62,.,.) =
-2.5166e-03 2.2051e-02 -1.9967e-02
-5.9436e-02 4.3872e-02 2.6832e-02
-1.7509e-02 2.4625e-02 2.4822e-02
(63,63,.,.) =
3.5832e-02 -7.0357e-02 3.9452e-03
-2.9835e-02 9.2727e-02 1.9336e-02
-2.9145e-02 -9.7087e-03 -7.3388e-02
[torch.FloatTensor of size 64x64x3x3]
), (u'layer1.0.bn1.running_mean',
-0.4332
-0.1757
0.0307
-0.7058
-1.6364
-0.7989
-0.0678
-0.1956
-1.1260
-0.9578
0.0030
-1.8265
-0.0393
-0.8680
-1.1062
-0.6359
-0.9872
-0.5778
-1.3349
-0.3408
-1.1982
-1.6058
-2.1702
-0.8814
-0.8175
-0.6951
0.6542
-1.6422
0.2811
0.3163
-0.4123
-1.4023
-1.5044
-2.5031
-2.1580
-1.3645
-0.8579
-0.2206
-2.5548
-2.2695
-0.1609
-0.8552
0.5289
1.3492
-0.9382
-0.3356
-2.9168
-1.5967
-1.8875
-1.6166
-1.9443
-2.0195
-0.9671
-1.3881
-1.8836
0.1869
-1.3487
-0.4593
-0.4542
-0.9032
-0.0768
-1.7719
1.2484
-0.9139
[torch.FloatTensor of size 64]
), (u'layer1.0.bn1.running_var',
0.4351
0.2044
0.2344
0.5559
0.9626
0.3484
0.0871
0.6851
0.4714
1.2642
0.1519
0.6730
0.2430
0.5577
0.8701
0.2419
0.2052
0.8149
0.3040
0.2617
0.8060
0.8007
1.5581
0.2404
0.4445
0.6765
0.5562
0.9378
0.2584
0.3173
0.0962
0.4118
0.5197
0.9767
1.2703
0.8908
0.3609
0.2227
1.1588
1.5965
0.4060
0.2559
0.1763
0.2797
0.3757
0.1282
1.8280
0.3145
0.7419
0.2129
0.8122
0.4660
0.4065
0.4914
0.4814
0.1697
0.4000
0.3867
0.1499
0.4137
0.0671
0.8303
0.2434
0.3449
[torch.FloatTensor of size 64]
), (u'layer1.0.bn1.weight', Parameter containing:
0.3090
0.2147
0.2366
0.4259
0.5137
0.2181
0.2204
0.2300
0.2640
0.2695
0.2138
0.4602
0.2661
0.2319
0.3900
0.2389
0.2660
0.3634
0.3474
0.2477
0.3285
0.5349
0.6440
0.2275
0.4482
0.3078
0.2604
0.4651
0.2179
0.2858
0.3426
0.4420
0.4450
0.4500
0.5516
0.5092
0.2564
0.2634
0.5664
0.6410
0.2228
0.1986
0.2460
0.2242
0.2143
0.1982
0.6368
0.3106
0.5049
0.2403
0.3065
0.3760
0.3794
0.4281
0.2991
0.3326
0.2596
0.3345
0.2006
0.4351
0.1683
0.5149
0.2629
0.3254
[torch.FloatTensor of size 64]
), (u'layer1.0.bn1.bias', Parameter containing:
0.1657
0.2420
0.1780
-0.0431
-0.2053
0.1598
0.2929
0.0912
0.1116
0.0884
0.1104
-0.2035
0.1539
0.0857
-0.1094
0.0654
0.0766
-0.2067
-0.0212
0.1396
0.0401
-0.2827
-0.3257
-0.0035
-0.4373
-0.1248
0.1282
-0.0874
0.1199
-0.0829
-0.5315
-0.0780
-0.3876
-0.0547
-0.1816
-0.1888
0.1320
0.0031
-0.2697
-0.2984
0.1394
0.2597
0.1372
0.0053
0.0132
0.3295
-0.2715
-0.0187
-0.2467
0.1579
0.0165
-0.0890
-0.1903
-0.0787
0.1700
-0.4832
0.0619
-0.0677
0.3125
-0.5064
0.3138
-0.2617
-0.1545
0.0063
[torch.FloatTensor of size 64]
), (u'layer1.0.conv2.weight', Parameter containing:
(0 ,0 ,.,.) =
2.5947e-02 -1.0458e-01 -4.7712e-03
-8.6223e-02 -3.3021e-01 -1.0275e-01
-5.7426e-02 -1.9074e-01 -5.4646e-02
(0 ,1 ,.,.) =
-1.6951e-02 2.1384e-02 -2.1074e-03
-3.2983e-03 4.5014e-02 -1.1510e-02
-5.9602e-02 6.4942e-03 2.9080e-03
(0 ,2 ,.,.) =
-4.4903e-03 1.9637e-02 1.3167e-02
1.3050e-02 -7.7471e-03 1.1931e-02
1.3454e-02 1.1103e-02 5.5145e-03
...
(0 ,61,.,.) =
1.2706e-03 -7.7438e-03 2.0753e-02
-4.0024e-02 -4.0383e-02 -3.4821e-02
-2.0251e-02 -9.5164e-03 1.3954e-02
(0 ,62,.,.) =
-2.3430e-03 3.2303e-02 -4.3342e-03
8.6194e-03 1.0553e-02 1.8074e-03
-1.2760e-02 -1.0232e-02 4.5711e-03
(0 ,63,.,.) =
1.5302e-02 2.1361e-02 -7.0908e-03
-1.4221e-02 4.5979e-02 2.1369e-02
3.1312e-02 6.6428e-02 2.1465e-02
⋮
(1 ,0 ,.,.) =
5.3422e-02 4.0515e-02 9.6680e-03
3.2884e-02 -2.3474e-02 3.4642e-02
-1.2861e-02 5.0066e-02 5.4579e-02
(1 ,1 ,.,.) =
2.8764e-02 4.3431e-02 2.8258e-02
2.8734e-02 -3.5459e-02 -5.2788e-02
-5.5119e-02 -7.1813e-02 -8.2970e-02
(1 ,2 ,.,.) =
9.5293e-02 1.2549e-01 -6.4001e-02
-4.1166e-02 -9.0480e-04 5.1387e-02
-1.1311e-01 -7.9823e-02 1.4373e-01
...
(1 ,61,.,.) =
-7.6924e-03 2.0647e-02 1.9521e-02
-6.7352e-03 1.2601e-04 4.8309e-03
-6.2405e-03 -9.2119e-03 -2.5806e-04
(1 ,62,.,.) =
-2.6153e-02 -2.4641e-02 4.0970e-02
-1.9164e-02 -1.0160e-02 3.3163e-02
5.4200e-03 9.0485e-04 6.7799e-04
(1 ,63,.,.) =
7.7762e-03 2.6447e-02 6.3650e-02
-3.0608e-02 2.4959e-02 1.2951e-02
-2.0938e-02 -7.7342e-03 -3.8790e-02
⋮
(2 ,0 ,.,.) =
1.0893e-02 -1.4409e-02 1.5730e-02
1.6655e-02 4.4535e-02 6.3212e-02
3.4121e-02 7.3135e-02 5.9203e-02
(2 ,1 ,.,.) =
2.3195e-03 7.7598e-03 2.0308e-02
2.0457e-02 4.0029e-02 3.4744e-02
-4.7356e-02 -3.7286e-02 1.4542e-02
(2 ,2 ,.,.) =
-2.2742e-02 -1.9000e-02 -8.4317e-03
-9.8759e-04 2.1510e-02 6.3959e-03
-9.4558e-03 2.6833e-03 -3.1136e-02
...
(2 ,61,.,.) =
-7.5787e-03 -1.6056e-02 -6.4204e-04
-5.5104e-03 1.4252e-02 4.5000e-02
-9.2800e-03 2.2351e-02 4.1728e-02
(2 ,62,.,.) =
2.5705e-02 4.8207e-02 7.9145e-02
-4.4350e-03 3.8872e-03 4.1694e-02
8.0536e-04 -1.0601e-02 9.2706e-03
(2 ,63,.,.) =
-3.3892e-02 9.3543e-03 4.1746e-02
-1.6470e-02 3.9542e-03 6.2438e-02
-3.1055e-02 -3.6302e-03 7.0817e-02
...
⋮
(61,0 ,.,.) =
-7.1044e-05 -9.0020e-03 -2.6998e-03
3.0072e-03 1.1579e-02 1.5214e-02
3.4832e-03 1.1353e-05 1.6320e-02
(61,1 ,.,.) =
-2.6334e-02 2.1967e-02 -6.0039e-02
4.4519e-02 1.3203e-01 -9.1163e-03
5.4242e-02 1.3726e-01 2.7454e-02
(61,2 ,.,.) =
1.7122e-02 3.7646e-03 1.4872e-02
1.2092e-02 1.1319e-02 3.4667e-02
8.1790e-03 -2.0805e-02 2.7143e-02
...
(61,61,.,.) =
-1.0111e-02 -1.0526e-02 2.8394e-02
-2.5112e-02 -2.2196e-02 3.7229e-02
-3.8220e-02 -4.6644e-02 1.5660e-02
(61,62,.,.) =
-2.5913e-03 -2.4307e-02 1.0611e-02
-2.1730e-02 -4.3938e-02 -7.1536e-03
-2.5171e-02 -5.9467e-02 -2.5577e-02
(61,63,.,.) =
2.8652e-02 2.5850e-04 1.1416e-03
3.7812e-02 -1.1271e-03 9.6027e-03
3.9350e-02 1.0134e-02 1.0449e-02
⋮
(62,0 ,.,.) =
-7.9305e-03 7.0872e-03 2.1412e-02
-6.0065e-02 1.4147e-03 9.7281e-02
-6.0130e-02 -2.1725e-02 3.6863e-02
(62,1 ,.,.) =
2.8024e-02 2.6183e-02 -2.3027e-02
5.1900e-02 -2.0588e-03 -1.0940e-01
-3.2729e-02 -6.2752e-03 8.0630e-03
(62,2 ,.,.) =
-1.8062e-02 -1.9510e-02 4.3163e-02
4.6080e-02 2.9494e-02 4.0844e-02
5.9607e-03 -6.5891e-03 -6.4623e-03
...
(62,61,.,.) =
2.2193e-02 8.4653e-03 3.6764e-03
1.7549e-02 2.1971e-02 -4.5108e-03
2.1124e-02 3.4591e-02 -1.6310e-02
(62,62,.,.) =
3.8144e-02 4.8395e-02 -9.5556e-02
1.8923e-02 1.1341e-02 -7.6311e-02
4.7358e-03 3.2138e-02 -7.4777e-02
(62,63,.,.) =
-1.9031e-02 -3.2568e-02 -3.8251e-02
1.0705e-02 2.3121e-03 -7.5078e-02
3.3316e-02 3.5515e-02 -2.1023e-03
⋮
(63,0 ,.,.) =
-1.3330e-01 7.4683e-02 -3.8624e-03
9.1377e-02 8.2415e-02 3.9469e-02
-1.8265e-02 -5.9943e-02 8.9354e-02
(63,1 ,.,.) =
1.5566e-02 -4.1716e-02 1.0633e-02
7.2644e-03 3.1934e-02 1.2732e-03
-2.0851e-02 -3.7593e-03 -7.0170e-02
(63,2 ,.,.) =
-6.6139e-02 1.0627e-01 1.9590e-02
5.4987e-02 -1.5552e-01 -1.8819e-02
-4.2554e-03 4.4964e-02 -2.4632e-02
...
(63,61,.,.) =
-6.1691e-02 -4.5531e-02 -9.1721e-03
4.3995e-02 4.5703e-02 -7.0108e-02
1.1388e-02 4.4678e-02 -4.5953e-02
(63,62,.,.) =
4.3432e-03 2.3194e-02 -2.1895e-02
-8.0216e-02 -5.7606e-02 -9.8455e-03
-3.3285e-02 -1.1468e-01 -2.3779e-02
(63,63,.,.) =
-6.3785e-02 -2.4485e-02 -4.9061e-02
-6.1594e-02 1.0328e-01 5.9685e-03
8.1863e-02 -3.0314e-02 -4.6373e-03
[torch.FloatTensor of size 64x64x3x3]
), (u'layer1.0.bn2.running_mean',
0.1677
0.0052
0.7868
0.1670
0.4774
-0.0362
-0.0578
0.0020
-0.3420
0.0674
-0.0970
-0.1000
-0.0659
-0.0385
-0.0709
-0.2218
-0.1343
0.1198
-0.0602
0.0006
0.0864
-0.1466
0.0354
-0.1539
-0.0416
-0.4317
0.0330
-0.0797
-0.5868
-0.2461
-0.1505
-0.4012
-0.2187
-0.6351
0.0698
0.1412
0.2754
-0.5645
-0.1113
-0.2414
-0.1313
-0.0215
0.1264
-0.1684
0.1693
-0.4684
-0.6908
0.2044
0.0015
-0.0543
-0.1487
0.1262
-0.1145
-0.1597
0.1230
-0.1605
-0.2256
-0.0995
-0.1621
0.3230
-0.0055
0.1091
0.0781
0.0121
[torch.FloatTensor of size 64]
), (u'layer1.0.bn2.running_var',
0.1130
0.0412
0.0335
0.1282
0.2084
0.0307
0.0606
0.0737
0.0313
0.0409
0.1376
0.0399
0.0437
0.0282
0.1588
0.0288
0.0837
0.0799
0.0177
0.1839
0.0884
0.3054
0.1512
0.0394
0.0374
0.0969
0.1719
0.0610
0.0607
0.1560
0.0448
0.1236
0.0464
0.1005
0.0498
0.0481
0.0450
0.1229
0.0623
0.0381
0.0229
0.1227
0.1656
0.1047
0.1316
0.1834
0.0622
0.1272
0.1929
0.0419
0.0263
0.2623
0.0712
0.1442
0.0937
0.0983
0.1163
0.1511
0.1009
0.0342
0.1854
0.0698
0.0631
0.0350
[torch.FloatTensor of size 64]
), (u'layer1.0.bn2.weight', Parameter containing:
0.2496
0.2198
0.2756
0.6073
0.2654
0.2942
0.1136
0.4425
0.2868
0.2974
0.2506
0.4103
0.4855
0.3383
0.4670
0.1772
0.2171
0.5025
0.2263
0.3667
0.4867
0.4586
0.4652
0.2200
0.1510
0.2761
0.3813
0.2803
0.2382
0.3953
0.3032
0.3163
0.2025
0.2323
0.2003
0.1661
0.4690
0.3476
0.3414
0.2274
0.2485
0.2356
0.2726
0.4657
0.3429
0.2465
0.4674
0.2812
0.6241
0.4152
0.3403
0.4218
0.1152
0.2985
0.5802
0.2795
0.4706
0.4517
0.4303
0.2749
0.3427
0.1137
0.5069
0.4370
[torch.FloatTensor of size 64]
), (u'layer1.0.bn2.bias', Parameter containing:
0.2275
0.0087
-0.0673
-0.0688
0.3598
-0.2017
-0.0000
0.0237
0.3955
0.0371
0.0069
0.2758
-0.0703
-0.2397
-0.0818
-0.0941
-0.1454
0.0373
-0.3617
-0.3956
-0.4079
0.0036
-0.2788
-0.0353
-0.0703
0.2101
-0.0046
-0.1966
-0.2807
-0.0165
0.2645
-0.0894
-0.2105
-0.1303
0.1721
0.0534
-0.2230
-0.0480
0.2457
0.2095
0.1622
0.1137
0.1146
-0.1487
-0.0322
-0.3055
0.4912
0.1087
0.0128
0.1004
0.4155
-0.0147
0.0239
0.0998
-0.1727
0.1008
-0.1456
-0.2274
0.1364
0.2013
-0.0574
0.2353
-0.1130
0.3093
[torch.FloatTensor of size 64]
), (u'layer1.1.conv1.weight', Parameter containing:
(0 ,0 ,.,.) =
1.9712e-02 -5.2562e-03 -3.7619e-03
-1.9635e-02 -1.2336e-02 -3.5196e-02
5.0761e-02 7.5668e-02 4.3344e-02
(0 ,1 ,.,.) =
1.4160e-02 -8.6094e-03 -1.0541e-02
-4.2586e-02 -2.3814e-02 -5.4694e-02
-1.4018e-03 4.6720e-02 5.0898e-02
(0 ,2 ,.,.) =
2.1559e-02 4.1633e-03 -9.7118e-03
-9.3201e-03 -2.5432e-02 -2.8274e-02
-3.0107e-02 -4.8230e-02 -2.6001e-02
...
(0 ,61,.,.) =
5.4300e-03 9.1875e-02 3.1938e-03
-1.7945e-02 5.7266e-02 -8.4098e-03
-3.4961e-02 -2.3296e-02 -3.5089e-02
(0 ,62,.,.) =
2.5603e-02 -3.1689e-02 -5.4160e-02
6.9736e-02 -1.0716e-02 -6.8034e-02
3.5578e-02 3.4749e-02 -1.9334e-02
(0 ,63,.,.) =
-6.5420e-02 -4.6427e-03 -2.3362e-02
7.5833e-02 9.1174e-03 -4.9701e-02
6.2944e-02 -9.8735e-02 3.3158e-02
⋮
(1 ,0 ,.,.) =
-9.0557e-03 -3.0753e-02 1.1953e-02
-3.2539e-02 -6.2846e-03 -2.0235e-02
4.7996e-03 -2.1462e-02 -4.1557e-03
(1 ,1 ,.,.) =
1.7163e-02 -2.3303e-03 7.3972e-02
-3.2105e-02 -7.7536e-02 -1.2648e-02
3.8985e-02 -4.3170e-02 1.0904e-02
(1 ,2 ,.,.) =
-2.9643e-02 -5.8534e-02 -5.9736e-02
-2.9437e-02 -3.6441e-02 -1.2380e-02
-2.2775e-02 -2.4485e-03 -1.6124e-03
...
(1 ,61,.,.) =
2.6830e-02 1.4267e-02 6.2658e-02
3.0585e-04 -5.3241e-03 3.2786e-03
2.1097e-02 -2.3189e-02 1.2102e-02
(1 ,62,.,.) =
-6.1182e-02 -2.9227e-02 2.0036e-02
-7.6089e-02 -7.7057e-02 8.6544e-02
-3.9228e-02 -3.2361e-02 -8.8970e-02
(1 ,63,.,.) =
-1.3372e-01 8.8362e-02 8.3836e-02
-1.1688e-02 4.3156e-01 -3.3629e-03
-2.3925e-02 -1.0092e-01 -1.0184e-01
⋮
(2 ,0 ,.,.) =
8.0165e-02 4.3042e-02 2.7325e-03
3.5269e-02 -1.5504e-02 -3.5011e-02
-1.7164e-02 -2.6827e-02 -3.3946e-02
(2 ,1 ,.,.) =
4.5439e-02 5.1585e-02 1.8321e-02
-3.9647e-02 2.3956e-02 -2.6609e-02
-3.0358e-02 -6.4729e-02 2.5834e-02
(2 ,2 ,.,.) =
3.8105e-02 4.0986e-02 4.1005e-02
1.7584e-02 -1.6494e-02 -3.2716e-02
5.5886e-03 -1.7068e-02 -3.0605e-02
...
(2 ,61,.,.) =
-1.3694e-01 -1.4074e-01 5.1423e-02
-1.2521e-01 -1.3128e-01 7.5733e-02
-4.5032e-02 -1.7081e-02 7.1252e-02
(2 ,62,.,.) =
6.3381e-02 1.5874e-02 -2.7322e-02
8.0356e-02 3.6104e-02 -2.8506e-02
2.6638e-02 2.2021e-02 3.2345e-02
(2 ,63,.,.) =
-1.2068e-03 -4.6179e-02 -1.5351e-02
-1.1276e-02 1.9200e-02 3.4336e-02
1.6540e-02 -7.8592e-03 -2.5392e-02
...
⋮
(61,0 ,.,.) =
3.3384e-02 6.9963e-02 1.0745e-02
-1.7518e-02 -5.3524e-02 -6.4960e-02
3.4248e-04 -4.5557e-02 -4.7336e-02
(61,1 ,.,.) =
-5.1031e-03 7.9784e-03 -8.6553e-04
-1.6557e-03 1.4661e-02 5.3365e-03
-3.1784e-02 -6.6940e-02 -4.6889e-02
(61,2 ,.,.) =
-1.1775e-02 7.2759e-03 7.6622e-03
-6.1288e-02 -5.2078e-02 -4.5152e-02
-8.6584e-02 -9.7381e-02 -1.0405e-01
...
(61,61,.,.) =
2.1243e-02 6.2456e-02 2.5188e-02
-2.2911e-02 -2.1100e-03 -2.7573e-02
4.6557e-02 6.4980e-02 3.1879e-02
(61,62,.,.) =
6.2867e-03 2.4255e-02 8.9674e-02
-7.7718e-03 -5.4311e-02 -4.6843e-02
-6.7499e-03 -6.6857e-02 -4.9842e-02
(61,63,.,.) =
4.7326e-03 -3.9533e-02 1.1500e-03
-2.7957e-02 -1.3466e-01 -6.0753e-02
-3.2010e-03 7.2213e-02 1.1009e-01
⋮
(62,0 ,.,.) =
2.3763e-02 -1.7876e-02 -7.4843e-03
1.6239e-02 5.4479e-04 -3.3735e-02
-2.2854e-02 -1.4316e-03 1.1010e-02
(62,1 ,.,.) =
5.2277e-03 -2.5941e-03 5.9594e-03
-2.9058e-03 -7.3409e-03 3.0652e-02
7.5540e-02 6.6445e-03 2.5518e-03
(62,2 ,.,.) =
-6.5970e-02 -4.1286e-02 -3.0278e-02
-3.5108e-02 -3.9099e-02 -1.6818e-02
-1.0224e-02 -8.6995e-03 -5.9939e-04
...
(62,61,.,.) =
2.1233e-02 -2.4559e-02 -7.4436e-03
-4.3734e-03 -3.2864e-02 -3.3453e-02
8.9269e-03 -1.7646e-02 3.8375e-04
(62,62,.,.) =
-7.8930e-02 -7.2940e-02 -6.7911e-02
-8.4146e-02 -8.3657e-02 5.3666e-02
-3.5577e-02 -3.6835e-02 5.8987e-03
(62,63,.,.) =
8.3767e-02 8.0476e-05 7.2164e-02
-6.4219e-02 -1.2661e-01 4.6026e-02
9.3033e-02 -4.7521e-02 3.6777e-02
⋮
(63,0 ,.,.) =
4.1012e-02 1.3361e-03 -5.8616e-02
4.2461e-02 2.9437e-03 -2.0445e-02
7.6097e-02 5.2504e-02 -5.5636e-03
(63,1 ,.,.) =
2.2046e-02 4.0888e-03 1.4645e-02
-7.7532e-02 -1.1912e-01 -7.0892e-02
-1.0618e-02 -3.2121e-02 -2.3969e-02
(63,2 ,.,.) =
-2.1612e-02 -2.6110e-03 -3.1664e-02
-3.2892e-02 -3.9771e-02 -5.1463e-02
-2.6150e-02 -3.6554e-02 -2.3315e-02
...
(63,61,.,.) =
4.4600e-03 8.4181e-02 2.3199e-02
5.7595e-02 1.3036e-01 3.2172e-02
-2.2774e-03 4.2065e-02 -4.8619e-02
(63,62,.,.) =
3.1533e-02 -4.3655e-02 2.0361e-02
3.9973e-03 -5.1430e-02 -6.3839e-02
6.4002e-03 4.5347e-02 4.7346e-02
(63,63,.,.) =
-9.1818e-02 1.0264e-02 9.6565e-02
-2.1635e-03 -2.3452e-02 -5.9038e-02
1.9402e-02 2.8854e-02 -9.6113e-02
[torch.FloatTensor of size 64x64x3x3]
), (u'layer1.1.bn1.running_mean',
-0.6534
0.9240
-1.3403
-0.7395
-0.5830
-1.6717
-0.3376
0.1913
-0.4565
-0.7877
-0.3756
-0.2295
-1.7003
-0.6135
0.5422
-0.1072
-0.2315
-0.3775
-1.8026
-0.7210
-0.0288
-1.2585
-1.8144
0.0504
-0.0739
-1.5506
-1.5092
-1.0623
0.1706
0.1527
0.3983
-2.9065
-0.9070
-0.2983
-1.8404
-2.3956
0.2241
-0.0760
-0.9525
-1.4632
0.7657
-0.3832
0.8590
-1.3211
-1.2599
-0.1220
-0.2230
0.5071
1.0262
-0.5969
-0.0104
-1.4013
-0.4267
-0.9979
-1.9458
0.1991
-0.8841
-0.8302
-0.3076
-2.0759
-1.2645
0.2679
0.4349
-1.2568
[torch.FloatTensor of size 64]
), (u'layer1.1.bn1.running_var',
0.7111
0.5543
0.6143
0.5148
0.2840
0.4924
0.3536
0.3939
0.2511
0.4859
0.1803
0.7468
0.4225
0.3686
0.1719
0.2777
0.3676
0.2311
0.3515
0.4917
0.1393
0.1732
0.6248
0.3038
0.1599
0.5246
0.2410
0.5096
0.5251
0.5369
0.1800
1.0623
0.4006
0.2060
0.5194
0.4981
0.4250
0.2616
0.8252
0.4991
0.3290
0.3642
0.2716
0.6520
0.4492
0.2753
0.3377
0.3167
0.3830
0.4624
0.4098
0.5566
0.5048
0.4747
0.6820
0.4387
0.3506
0.2995
0.5595
0.6855
0.5260
0.6478
0.4960
0.5449
[torch.FloatTensor of size 64]
), (u'layer1.1.bn1.weight', Parameter containing:
0.3910
0.4375
0.3746
0.3990
0.3404
0.3503
0.2618
0.2707
0.2865
0.4308
0.1895
0.3041
0.3837
0.2944
0.2105
0.3304
0.2943
0.2887
0.2060
0.4627
0.2335
0.1831
0.4489
0.2830
0.3389
0.2997
0.3503
0.2735
0.3908
0.2817
0.2636
0.4462
0.3282
0.3776
0.4471
0.3878
0.2516
0.3172
0.3661
0.3166
0.3818
0.3128
0.2274
0.3627
0.2902
0.2381
0.2988
0.2469
0.3840
0.2886
0.3197
0.2879
0.3218
0.4559
0.3500
0.2420
0.3396
0.3519
0.3839
0.3806
0.4039
0.2826
0.4594
0.3342
[torch.FloatTensor of size 64]
), (u'layer1.1.bn1.bias', Parameter containing:
-0.0997
-0.4755
-0.0474
-0.2698
-0.0834
-0.0072
0.0474
0.1022
-0.0170
-0.1471
0.2307
0.1447
-0.1775
0.0273
0.1559
-0.1836
0.1238
-0.1522
0.0554
-0.2881
-0.2606
0.2316
-0.3242
-0.0219
-0.2645
0.0576
-0.2465
0.0481
-0.3530
0.0950
-0.1862
-0.1707
-0.0161
-0.2604
-0.3145
-0.1083
0.0659
-0.1427
-0.0570
-0.0076
-0.3006
-0.0744
-0.0683
-0.1104
0.0253
0.0489
-0.2515
0.1150
-0.3783
0.0846
-0.0368
0.1439
-0.0468
-0.3087
-0.0240
0.1397
-0.0908
-0.1795
-0.1129
-0.0793
-0.1491
0.0594
-0.4433
-0.0138
[torch.FloatTensor of size 64]
), (u'layer1.1.conv2.weight', Parameter containing:
(0 ,0 ,.,.) =
-2.1574e-02 -4.5688e-03 4.5483e-03
-8.1870e-03 4.1740e-02 2.3010e-02
-8.9283e-03 5.7352e-02 2.9818e-02
(0 ,1 ,.,.) =
5.8627e-02 4.2864e-02 4.4912e-02
2.2281e-02 -1.2969e-02 7.6099e-03
4.5373e-02 3.0712e-02 3.7700e-02
(0 ,2 ,.,.) =
-1.5456e-02 -3.8692e-02 -4.6010e-02
-2.3123e-02 2.8293e-02 4.7790e-03
-2.0328e-02 1.3756e-02 2.5883e-02
...
(0 ,61,.,.) =
5.1302e-02 4.2291e-02 5.7833e-02
4.5210e-02 5.5850e-02 1.4318e-02
1.4241e-02 1.7968e-02 1.4344e-02
(0 ,62,.,.) =
4.6012e-03 1.2566e-02 4.8931e-02
-6.5754e-03 -2.6431e-02 1.5855e-02
1.3192e-02 1.9011e-02 1.3842e-02
(0 ,63,.,.) =
6.1983e-02 6.9919e-02 6.1035e-02
6.1253e-02 9.9557e-02 5.9060e-02
5.8298e-02 8.1652e-02 8.1499e-02
⋮
(1 ,0 ,.,.) =
-1.0088e-02 -1.2959e-02 9.7798e-03
5.5408e-02 4.3501e-02 5.6983e-02
5.3427e-02 3.5118e-02 3.6782e-02
(1 ,1 ,.,.) =
2.4442e-03 -3.0207e-02 -1.0377e-02
-4.5297e-02 -4.5318e-02 5.4623e-03
-4.4762e-02 -1.5508e-02 6.9745e-03
(1 ,2 ,.,.) =
3.9658e-02 3.6838e-02 5.8796e-03
2.3207e-02 3.9240e-03 -2.0887e-02
-1.4829e-02 5.3606e-03 1.7404e-03
...
(1 ,61,.,.) =
3.2160e-02 5.9042e-02 4.8433e-02
-2.6464e-02 -8.0667e-03 -1.0359e-02
-2.6699e-02 -9.5411e-03 -2.8902e-02
(1 ,62,.,.) =
-2.9235e-02 -3.9078e-02 -4.4955e-02
-2.0346e-02 -4.4891e-02 -3.7477e-02
1.9653e-02 -1.5562e-03 -5.8245e-03
(1 ,63,.,.) =
-5.0696e-02 -4.8902e-02 9.1631e-03
5.1668e-03 2.0509e-02 6.6874e-02
2.8934e-02 4.6717e-02 2.1371e-02
⋮
(2 ,0 ,.,.) =
2.1744e-02 -2.8354e-02 -3.2557e-02
3.0519e-02 1.8536e-02 1.5244e-02
1.3832e-03 1.7051e-02 3.2020e-02
(2 ,1 ,.,.) =
-3.6293e-02 1.0914e-02 4.5371e-02
1.3399e-02 6.4272e-02 8.8210e-02
4.6697e-02 9.9653e-02 8.7606e-02
(2 ,2 ,.,.) =
-2.4336e-02 -2.9627e-02 1.9537e-02
-3.3412e-02 -2.2290e-02 -2.8879e-02
1.4765e-02 1.7234e-02 -1.8185e-02
...
(2 ,61,.,.) =
-3.9859e-02 -7.1075e-02 -5.8546e-02
2.2902e-02 1.1184e-02 -2.3654e-02
8.1897e-02 1.1996e-01 9.3242e-02
(2 ,62,.,.) =
3.1984e-02 7.4931e-02 6.6020e-02
2.8490e-02 1.1931e-01 1.2100e-01
7.9259e-04 4.3812e-02 4.4648e-02
(2 ,63,.,.) =
3.2748e-02 4.1444e-02 -8.1932e-03
4.5541e-02 2.9426e-02 -8.5440e-03
1.1634e-04 1.8045e-03 1.4826e-02
...
⋮
(61,0 ,.,.) =
-4.4144e-02 -8.3106e-02 -5.3073e-02
3.2124e-02 1.0286e-02 2.4409e-02
6.1606e-03 -1.9455e-02 4.0534e-02
(61,1 ,.,.) =
5.6026e-04 9.6961e-03 2.5010e-03
7.1679e-03 -1.7535e-02 -2.3857e-02
-9.8745e-03 -1.8550e-02 1.7301e-03
(61,2 ,.,.) =
4.3882e-03 4.2049e-02 7.5950e-02
-6.5610e-02 -3.6130e-02 -1.9404e-02
-3.8091e-02 -2.6749e-02 -1.3865e-02
...
(61,61,.,.) =
-4.5593e-02 -4.6050e-02 -2.2809e-02
-9.7648e-03 2.4910e-03 2.4503e-02
2.0381e-02 5.2393e-02 6.9019e-02
(61,62,.,.) =
9.3306e-04 1.2483e-02 -1.1817e-02
-1.2627e-02 -1.8756e-02 -1.4144e-03
-5.2490e-03 -4.6126e-03 -1.3224e-02
(61,63,.,.) =
7.4689e-04 -1.0135e-02 -7.8264e-03
1.2491e-02 -2.5865e-02 4.0514e-02
5.8855e-03 4.5990e-02 1.0651e-01
⋮
(62,0 ,.,.) =
1.2262e-02 -1.5378e-02 1.3862e-03
4.1166e-02 -2.4944e-02 -2.6686e-02
-1.7423e-02 5.2690e-03 -2.1861e-02
(62,1 ,.,.) =
-3.1207e-02 -3.3025e-02 2.2114e-02
-2.4009e-02 1.2988e-02 2.2430e-02
1.0332e-02 4.3601e-03 4.7321e-03
(62,2 ,.,.) =
2.0182e-02 6.1569e-02 -2.8771e-02
5.8231e-02 4.6767e-02 -2.8417e-05
3.7545e-02 -4.5886e-02 1.5849e-02
...
(62,61,.,.) =
7.0431e-03 -3.6082e-03 7.1986e-03
2.4895e-02 6.1671e-03 -3.2427e-02
7.2338e-03 2.2406e-03 -5.3330e-02
(62,62,.,.) =
2.8072e-02 -1.0571e-02 -1.3854e-02
-1.0879e-02 6.1929e-03 -5.6713e-03
-2.6083e-02 8.1861e-03 -3.2873e-02
(62,63,.,.) =
-3.1032e-02 -6.0485e-02 -2.5583e-02
-4.6239e-02 -2.2805e-02 -7.7678e-03
-9.4698e-03 4.0247e-03 -4.8637e-03
⋮
(63,0 ,.,.) =
2.3128e-02 -5.6038e-02 -3.4572e-02
1.0638e-03 5.7929e-02 -7.6970e-03
-3.0103e-02 3.5573e-02 -1.8143e-02
(63,1 ,.,.) =
9.6840e-02 -1.1186e-01 -7.8766e-02
-1.0444e-01 -1.0851e-01 -1.9553e-01
-1.1986e-01 -7.1474e-02 3.6750e-02
(63,2 ,.,.) =
-2.2194e-02 6.0298e-03 5.6914e-02
-4.8342e-02 7.8893e-02 -5.1026e-02
-5.1294e-02 -5.7434e-02 -1.9178e-02
...
(63,61,.,.) =
-4.4896e-02 -8.1267e-02 5.1794e-02
-8.3985e-02 -5.7778e-02 6.7891e-02
2.3837e-02 3.8954e-02 4.1141e-02
(63,62,.,.) =
4.6446e-03 2.7367e-02 -2.3154e-02
2.0675e-02 2.3429e-02 6.4380e-04
-5.2222e-02 -1.4854e-02 -2.5150e-02
(63,63,.,.) =
2.1291e-02 1.2736e-02 8.4553e-03
-8.2932e-02 7.2067e-02 1.3107e-01
8.5491e-03 1.3677e-01 3.9867e-02
[torch.FloatTensor of size 64x64x3x3]
), (u'layer1.1.bn2.running_mean',
-0.0555
-0.2037
0.7682
-0.0659
0.4746
-0.0462
-0.0896
0.0405
-0.2446
-0.3079
0.2418
-0.0135
-0.0139
-0.5716
0.1631
-0.1234
-0.0607
-0.0682
0.0326
0.0245
-0.1008
0.0646
0.0028
-0.0101
-0.0145
0.0377
-0.0842
0.0183
-0.5056
-0.0529
-0.0573
-0.1212
-0.3578
-0.2472
-0.3403
0.0570
-0.2512
-0.2658
-0.1210
-0.0369
-0.0996
0.2838
0.1478
-0.1105
-0.4597
-0.1867
-0.2858
0.1237
-0.1291
-0.2389
0.0203
0.1081
-0.2310
-0.0848
-0.0316
0.2546
0.0597
-0.1729
-0.0190
0.1898
0.0823
0.0380
-0.0429
0.1392
[torch.FloatTensor of size 64]
), (u'layer1.1.bn2.running_var',
0.0485
0.1034
0.0663
0.0458
0.1147
0.0534
0.0654
0.0467
0.0442
0.0820
0.0332
0.0400
0.0379
0.0849
0.0409
0.0282
0.0821
0.0699
0.0327
0.0497
0.0506
0.1060
0.0921
0.0300
0.0170
0.0383
0.0358
0.0383
0.0745
0.0579
0.0390
0.0504
0.0494
0.0617
0.0458
0.0347
0.0525
0.0575
0.0475
0.0354
0.0658
0.0336
0.0437
0.0734
0.0574
0.0596
0.0452
0.0403
0.0789
0.0551
0.0328
0.0775
0.0722
0.0390
0.0501
0.0394
0.0454
0.0450
0.0899
0.0297
0.0527
0.0184
0.0526
0.0340
[torch.FloatTensor of size 64]
), (u'layer1.1.bn2.weight', Parameter containing:
0.2560
0.5690
0.4042
0.5130
0.2178
0.4940
0.3315
0.5510
0.4354
0.5291
0.2081
0.4735
0.5945
0.5645
0.2761
0.2571
0.4853
0.6240
0.4370
0.2308
0.4970
0.3157
0.5706
0.2162
0.1932
0.1448
0.2218
0.2389
0.5871
0.3501
0.4109
0.3199
0.5808
0.3281
0.2723
0.1971
0.6139
0.4075
0.6304
0.3874
0.7605
0.2111
0.3071
0.4603
0.3099
0.1914
0.4431
0.2537
0.5745
0.6459
0.3914
0.3090
0.6782
0.1937
0.5814
0.2570
0.3514
0.2124
0.5794
0.3415
0.2051
0.0715
0.4090
0.4416
[torch.FloatTensor of size 64]
), (u'layer1.1.bn2.bias', Parameter containing:
-0.1778
-0.1287
0.0349
-0.1452
0.1864
-0.1413
-0.4201
-0.1334
0.2183
-0.1912
0.0311
-0.0235
-0.1724
-0.0274
-0.0295
-0.1031
0.0047
0.0828
-0.1521
0.0183
-0.2418
-0.0831
-0.0491
-0.0688
-0.2560
0.1381
-0.0165
0.2092
-0.0028
-0.0265
-0.0225
0.0286
-0.1065
-0.3698
0.2862
-0.1036
0.3080
-0.0894
0.2772
0.1136
-0.3157
0.0423
0.0567
0.2369
-0.0727
0.0465
-0.0536
0.1309
0.0282
-0.1371
0.1464
-0.0717
-0.3237
-0.1583
-0.0424
-0.1278
-0.1703
0.0413
0.0891
0.0770
-0.0730
0.0683
-0.0391
0.0476
[torch.FloatTensor of size 64]
), (u'layer2.0.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
-7.1555e-02 -1.1031e-01 -1.3711e-01
7.0593e-02 -1.4782e-02 -1.0053e-01
1.1938e-01 8.7330e-02 -8.2206e-03
( 0 , 1 ,.,.) =
-2.3999e-02 -6.3682e-03 2.4303e-03
6.1831e-03 1.8781e-02 2.5324e-02
2.3656e-03 -4.0037e-03 -1.1949e-02
( 0 , 2 ,.,.) =
6.0344e-03 6.3784e-03 -1.2247e-02
7.8854e-03 -1.3464e-02 -4.2702e-02
1.7380e-02 -1.3862e-02 -4.7145e-02
...
( 0 ,61 ,.,.) =
3.4324e-02 3.2257e-02 2.5819e-02
8.4676e-03 -4.5413e-04 -1.0832e-02
-6.7166e-03 -1.5052e-02 -2.6939e-02
( 0 ,62 ,.,.) =
-1.2089e-02 -2.3588e-02 -2.2689e-02
1.0135e-02 1.8285e-02 -1.5695e-02
2.1352e-02 5.8568e-02 4.2873e-02
( 0 ,63 ,.,.) =
1.4421e-02 -2.8298e-02 -7.0770e-03
3.0260e-02 -6.6294e-03 -1.6901e-02
3.9085e-02 1.4222e-02 2.2294e-02
⋮
( 1 , 0 ,.,.) =
-7.7911e-02 -7.3929e-02 -3.6671e-02
-3.4903e-02 -6.2355e-02 -3.7793e-02
-2.8379e-02 -5.4291e-02 -4.9411e-02
( 1 , 1 ,.,.) =
-1.2970e-02 -2.1825e-02 -2.8767e-04
7.6444e-03 1.7653e-02 1.6660e-02
3.8337e-02 2.3006e-02 -1.6620e-03
( 1 , 2 ,.,.) =
-8.7592e-02 -8.4735e-02 -5.5818e-02
-7.7731e-02 -8.0311e-02 -3.2554e-02
-5.6313e-02 -4.2047e-02 1.5247e-02
...
( 1 ,61 ,.,.) =
-3.2377e-02 -4.0018e-02 -2.9523e-02
-1.5294e-02 -1.4165e-02 2.7086e-03
1.1652e-02 2.3886e-02 2.4413e-02
( 1 ,62 ,.,.) =
2.0891e-03 -3.0475e-02 -3.3818e-02
6.7829e-03 3.8681e-04 -1.4540e-02
-3.1306e-03 6.7689e-03 8.4524e-03
( 1 ,63 ,.,.) =
3.0586e-02 4.6281e-02 3.8359e-04
5.3079e-02 6.7488e-02 3.0547e-02
2.3374e-02 4.3993e-02 -3.8713e-03
⋮
( 2 , 0 ,.,.) =
1.3878e-02 3.2724e-02 4.6584e-02
-8.0647e-03 1.6209e-03 1.5153e-02
-7.0342e-02 -5.3299e-02 -4.5920e-02
( 2 , 1 ,.,.) =
4.6035e-02 3.5400e-02 3.4941e-02
5.8351e-02 5.4640e-02 2.7162e-02
2.6799e-02 4.5056e-02 6.6886e-03
( 2 , 2 ,.,.) =
-3.3766e-02 -3.8605e-02 -2.4172e-02
-1.8285e-03 1.0888e-02 1.1425e-02
2.2282e-02 1.4024e-02 3.6332e-03
...
( 2 ,61 ,.,.) =
-1.6330e-02 -6.9552e-02 -8.9737e-02
3.9766e-02 1.5501e-02 -2.2695e-02
1.0290e-01 1.2294e-01 6.3867e-02
( 2 ,62 ,.,.) =
-4.2318e-03 4.9511e-02 -7.6289e-03
-2.7720e-02 7.0398e-03 -9.4052e-03
-6.7008e-02 -6.0542e-02 -2.5967e-02
( 2 ,63 ,.,.) =
-5.8560e-03 -1.7573e-02 -3.8016e-02
2.8579e-03 -4.1603e-03 1.0113e-02
2.6243e-02 3.5200e-02 3.1143e-02
...
⋮
(125, 0 ,.,.) =
-4.4193e-02 -6.5322e-02 -1.7594e-02
-9.3970e-02 -5.8291e-02 1.2093e-02
-2.2998e-02 3.2463e-02 7.1731e-02
(125, 1 ,.,.) =
-4.7220e-03 -3.0125e-03 -1.8075e-02
1.2667e-02 -8.0509e-03 -1.4605e-02
7.8220e-03 -1.0720e-02 -2.6515e-02
(125, 2 ,.,.) =
-2.5299e-02 -4.9383e-02 -1.2720e-02
-5.2206e-02 -4.7233e-02 -4.2470e-03
-4.8697e-02 -2.5320e-02 8.6178e-03
...
(125,61 ,.,.) =
-3.7617e-03 7.8398e-03 -5.9525e-03
4.0277e-03 7.3575e-03 -1.1667e-02
-3.9997e-02 -3.8038e-02 -5.0469e-02
(125,62 ,.,.) =
-3.8949e-03 -6.8965e-03 3.4102e-02
-6.9814e-03 -4.9762e-02 5.8711e-02
1.8361e-02 2.5874e-02 8.0028e-02
(125,63 ,.,.) =
-3.3014e-02 -2.1510e-02 -2.1509e-03
-4.3894e-02 -3.2009e-02 -1.6265e-02
-1.1037e-02 2.8872e-04 3.0937e-02
⋮
(126, 0 ,.,.) =
-4.9907e-02 -5.0222e-02 -5.0985e-02
2.2644e-02 -1.4098e-02 -2.4426e-02
1.9960e-02 9.6426e-02 1.0580e-01
(126, 1 ,.,.) =
-3.6873e-02 2.1413e-03 8.3469e-03
-4.0796e-02 -3.3767e-02 -3.4955e-02
3.9466e-02 7.0508e-02 8.6065e-02
(126, 2 ,.,.) =
1.4842e-02 6.6914e-03 1.4324e-02
-3.2621e-02 -4.4027e-02 -2.2269e-02
7.1982e-03 -1.9187e-02 -4.9348e-03
...
(126,61 ,.,.) =
-4.9938e-03 1.6018e-02 1.1242e-02
-4.7668e-03 2.1921e-02 2.2660e-02
-2.6753e-02 2.6917e-04 -5.6827e-03
(126,62 ,.,.) =
-8.7725e-03 1.0761e-02 7.3603e-03
-1.8010e-05 -1.7926e-02 4.8229e-03
4.2431e-02 -1.5764e-02 2.3554e-02
(126,63 ,.,.) =
-1.3830e-02 -3.0793e-03 -4.0854e-03
3.3363e-02 4.2952e-02 3.5867e-02
-3.9653e-02 -3.0855e-02 -4.3189e-02
⋮
(127, 0 ,.,.) =
-3.8617e-02 -3.1549e-03 2.5739e-03
-1.1592e-02 9.8761e-03 7.5235e-03
-1.9339e-02 -9.8779e-03 2.1755e-03
(127, 1 ,.,.) =
1.6889e-04 1.8302e-03 -8.9537e-03
5.8343e-03 1.7360e-02 -1.9029e-02
5.8642e-03 -7.4307e-04 1.4667e-03
(127, 2 ,.,.) =
-1.6506e-02 -2.8401e-02 1.3986e-02
-2.2922e-02 -4.3484e-02 1.0471e-02
-2.5801e-03 -4.5258e-02 7.9791e-03
...
(127,61 ,.,.) =
-1.5260e-03 -7.6469e-03 1.3597e-02
5.5301e-04 -2.9176e-03 2.2147e-02
3.2763e-03 -1.0775e-05 1.3163e-02
(127,62 ,.,.) =
5.1756e-03 1.8495e-02 -8.0268e-03
-3.5030e-02 2.6403e-02 -7.1220e-03
-5.2325e-02 -1.1185e-02 1.9146e-02
(127,63 ,.,.) =
-6.8805e-02 5.1618e-02 1.9787e-02
2.5533e-02 -6.1926e-02 4.9924e-02
1.0532e-01 -4.4136e-02 4.9907e-02
[torch.FloatTensor of size 128x64x3x3]
), (u'layer2.0.bn1.running_mean',
0.1502
0.3009
-0.1475
-0.1210
-0.5701
-0.7525
0.0232
-0.1191
-0.5203
-0.0344
0.1527
-0.8009
-0.2133
-0.1956
-0.4503
-0.2632
0.0839
-1.3614
0.3520
0.0435
-0.5124
-0.4489
0.3674
-0.7865
-0.0061
-0.5502
-0.2629
-0.0697
-0.3892
0.8596
-0.0261
0.0194
-1.4822
0.2077
0.0741
-0.5370
0.6348
0.0066
-0.6156
-0.6373
-0.2649
0.3021
-0.6140
-0.8625
-1.1688
-0.2691
-0.7569
-0.7104
-0.5601
-0.3803
-0.6424
-0.5653
-0.3943
-0.8532
-0.8817
-0.5444
-0.2364
-0.2572
-0.0131
-1.1256
0.2372
-0.2265
-0.1682
-0.7450
-0.8640
0.2118
0.1918
0.5058
0.0755
-0.6975
-0.7518
0.5799
-0.2933
-0.0071
-0.6256
-0.2616
-0.6733
-1.1375
0.1193
-0.4987
-0.6461
-0.0576
0.0361
0.0026
-1.1884
0.2901
-0.7978
-0.2888
0.7106
-0.6718
-0.3914
0.3720
-0.4927
-0.5238
-0.0162
-0.5074
-0.3267
-1.2319
-0.1927
-0.1273
0.3230
-0.0156
-0.1317
-0.6099
-0.0179
-0.3003
-0.1247
0.1452
-0.4937
-0.4852
-0.3357
-0.3261
-0.3776
-0.3691
-0.6458
-0.3323
-0.0424
-0.2551
-0.6557
-0.2917
0.5345
-0.4286
0.1585
-0.3547
-0.1262
-1.0521
-0.2490
-0.2917
[torch.FloatTensor of size 128]
), (u'layer2.0.bn1.running_var',
0.5933
0.5225
0.9587
1.2984
0.6954
0.5495
0.7123
0.7396
0.4001
0.5929
0.4880
0.6532
0.5901
0.5786
0.5031
0.6984
0.6699
0.3613
0.6258
0.5226
0.6306
0.6908
0.4390
0.7955
0.8504
0.8891
0.4963
0.7665
0.9027
0.6111
0.4632
0.4609
0.8053
0.4621
0.5438
0.6401
0.6203
0.6133
0.6070
0.6558
0.5315
0.2684
0.1446
0.5651
0.7784
0.6977
0.4379
0.6246
0.7749
0.7301
0.5099
0.6298
0.5608
0.7794
0.6851
0.6257
1.0322
0.7427
0.9320
0.3659
0.7492
0.5316
0.6421
1.2247
0.6555
0.7807
0.7130
0.5728
0.4848
0.6181
0.6428
0.5033
0.6044
0.6865
0.6463
0.6181
0.5878
0.6709
0.8211
0.5813
0.6612
0.6009
0.6994
0.5487
0.3528
0.4863
0.7886
0.6114
0.3858
0.6698
0.4507
0.7751
0.5016
0.5925
1.1193
0.7031
0.5847
0.4395
0.6343
0.5480
0.6623
0.7094
0.5908
0.8719
1.1220
0.7039
0.6290
0.4964
0.7137
0.6734
0.4282
0.7129
0.5942
0.7962
0.6300
0.9883
0.6343
0.7726
0.6982
0.6926
0.6692
0.7207
0.4516
0.6805
0.5262
0.4744
0.7139
0.6144
[torch.FloatTensor of size 128]
), (u'layer2.0.bn1.weight', Parameter containing:
0.3248
0.3613
0.2960
0.2913
0.3407
0.3435
0.3049
0.3308
0.3447
0.3860
0.3196
0.2622
0.2994
0.2189
0.2397
0.3744
0.3555
0.1948
0.3349
0.2159
0.3349
0.3454
0.3094
0.3769
0.3546
0.3267
0.3178
0.3272
0.3832
0.2585
0.2973
0.3481
0.2827
0.2995
0.3451
0.3471
0.3440
0.3344
0.3211
0.3180
0.2940
0.3353
0.3253
0.3733
0.3198
0.2987
0.1620
0.3262
0.3271
0.3410
0.3693
0.3320
0.3357
0.2951
0.3115
0.3185
0.3139
0.2633
0.3089
0.3601
0.2734
0.3433
0.3335
0.3288
0.2706
0.2879
0.3318
0.3310
0.3170
0.2977
0.3300
0.3216
0.3205
0.3231
0.3481
0.3130
0.2826
0.2856
0.3279
0.3666
0.3288
0.3575
0.3377
0.2904
0.3273
0.3214
0.3332
0.3452
0.1842
0.3916
0.3337
0.2325
0.3285
0.3358
0.2885
0.3149
0.3288
0.2236
0.3159
0.2993
0.3403
0.3220
0.3171
0.2950
0.2847
0.3224
0.3119
0.2613
0.3374
0.3333
0.3330
0.2959
0.4087
0.2192
0.2982
0.4006
0.3081
0.3171
0.2862
0.2952
0.3070
0.3583
0.3232
0.3345
0.3453
0.3043
0.3327
0.3337
[torch.FloatTensor of size 128]
), (u'layer2.0.bn1.bias', Parameter containing:
-0.0589
-0.1686
-0.0206
0.0027
-0.0955
-0.1048
0.0349
-0.0885
-0.2053
-0.1764
-0.1224
-0.0364
-0.0785
0.2088
-0.0403
-0.1820
-0.1076
0.2989
-0.0570
0.2064
-0.0921
-0.1376
-0.1304
-0.1193
-0.1006
-0.0380
-0.1108
-0.0477
-0.1087
0.1581
-0.1123
-0.1584
0.0976
-0.0430
-0.1349
-0.1189
-0.0986
-0.0479
-0.0837
-0.0720
-0.0836
-0.2442
-0.3376
-0.2124
-0.0693
-0.0651
0.4979
-0.0811
-0.1021
-0.0788
-0.1802
-0.1011
-0.1090
-0.0617
-0.0856
-0.0495
-0.0370
0.0023
-0.0508
-0.2430
0.0009
-0.1525
-0.0963
-0.0516
-0.0473
0.0884
-0.1028
-0.0907
-0.1086
-0.0379
-0.1030
-0.1609
-0.0903
-0.0898
-0.1282
-0.0830
-0.0186
-0.0232
-0.0045
-0.2131
-0.1431
-0.1391
-0.1303
-0.0568
-0.1862
-0.1209
-0.0340
-0.1181
0.2298
-0.2085
-0.1335
0.1418
-0.0891
-0.1273
0.0107
-0.1029
-0.1025
0.1562
-0.0937
-0.0657
-0.1245
-0.0451
-0.0707
-0.0447
0.0715
-0.0484
-0.0312
-0.0437
-0.0927
-0.1465
-0.1151
-0.0183
-0.1927
0.2491
0.0300
-0.1310
-0.0468
-0.0851
-0.0421
-0.0413
-0.0457
-0.1433
-0.0981
-0.1046
-0.1315
-0.1249
-0.0982
-0.0961
[torch.FloatTensor of size 128]
), (u'layer2.0.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
-7.4379e-03 -9.8091e-03 2.7976e-03
-1.0780e-02 2.5794e-02 4.5517e-02
-2.7241e-02 5.3206e-03 1.3177e-02
( 0 , 1 ,.,.) =
3.5440e-02 2.5101e-02 7.8204e-03
4.0312e-03 1.9894e-02 2.7449e-02
3.5329e-02 3.5456e-02 1.3315e-02
( 0 , 2 ,.,.) =
1.9270e-02 -2.1333e-02 -3.6199e-02
-1.9590e-02 -1.8873e-02 -5.9538e-02
-2.1838e-02 -7.6875e-03 3.9487e-03
...
( 0 ,125,.,.) =
-6.8038e-03 1.0841e-02 -3.7045e-03
1.3479e-02 1.1362e-02 -1.3431e-03
8.1422e-03 1.9292e-04 5.5109e-04
( 0 ,126,.,.) =
7.6939e-03 7.7306e-03 4.3960e-03
-1.0202e-02 -1.1698e-02 -9.6343e-03
-3.9049e-03 1.8147e-02 1.3297e-02
( 0 ,127,.,.) =
1.2434e-02 -2.6889e-02 -1.1974e-02
2.6846e-02 2.6409e-02 -2.1473e-02
1.2892e-02 2.7632e-03 -5.4267e-03
⋮
( 1 , 0 ,.,.) =
1.2840e-02 1.8529e-02 -2.6782e-03
-1.6777e-02 -1.2281e-02 3.5471e-02
-8.6486e-04 2.4498e-02 1.8152e-02
( 1 , 1 ,.,.) =
-6.6870e-03 -2.0710e-02 -1.4421e-02
-7.3135e-03 4.2568e-02 7.4339e-03
2.7640e-02 1.5997e-02 1.5939e-02
( 1 , 2 ,.,.) =
-2.2903e-02 -2.0577e-02 2.3593e-02
-2.7524e-02 -5.6073e-02 -6.9899e-02
2.0502e-02 5.1301e-02 2.1989e-02
...
( 1 ,125,.,.) =
-2.7188e-02 -3.8969e-02 -3.9503e-02
-6.2117e-02 -7.4923e-02 -9.5650e-02
-6.0467e-02 -7.7697e-02 -7.4620e-02
( 1 ,126,.,.) =
2.8663e-02 2.9341e-02 2.8688e-02
7.9438e-03 4.7108e-02 1.4586e-02
-1.8200e-03 2.2035e-02 7.3670e-03
( 1 ,127,.,.) =
1.5625e-03 -1.6815e-02 -4.6104e-03
-8.1347e-03 -2.5480e-02 -5.2408e-02
-9.2823e-03 -9.6452e-04 -3.7804e-02
⋮
( 2 , 0 ,.,.) =
4.7439e-03 6.0541e-03 -7.1074e-03
2.3583e-02 -9.3094e-02 -7.9317e-02
-7.8970e-03 -5.0526e-02 -1.0469e-02
( 2 , 1 ,.,.) =
1.4810e-02 1.6199e-02 -5.1457e-02
8.5937e-03 8.1354e-03 -4.2865e-02
9.0780e-02 6.5382e-02 4.3530e-02
( 2 , 2 ,.,.) =
-1.3827e-02 -6.3971e-03 8.4965e-03
1.3832e-02 -1.2413e-02 5.3880e-03
2.0189e-02 -3.5255e-03 7.9905e-03
...
( 2 ,125,.,.) =
-9.2351e-04 1.8478e-02 -3.0603e-02
-1.7034e-02 5.6756e-03 -4.9061e-02
-3.2771e-02 -3.7422e-02 -4.5931e-02
( 2 ,126,.,.) =
-4.6355e-03 6.9231e-03 -1.0628e-03
-7.9292e-03 -4.9909e-02 4.2104e-02
-7.5158e-02 -4.7826e-03 -5.8031e-03
( 2 ,127,.,.) =
1.1503e-02 -1.4634e-02 3.7884e-02
1.4056e-02 4.8553e-02 2.3157e-02
2.1494e-02 -1.0090e-02 3.3782e-02
...
⋮
(125, 0 ,.,.) =
2.6448e-02 4.0213e-03 7.5348e-03
6.3626e-02 -3.1986e-02 -1.8433e-03
2.6220e-02 7.5575e-03 4.9462e-02
(125, 1 ,.,.) =
-2.8731e-02 -2.2669e-02 -5.1264e-02
-2.6000e-02 -4.8740e-02 -1.4003e-02
-1.7263e-02 -4.1574e-02 -1.1665e-02
(125, 2 ,.,.) =
-3.4972e-02 3.5634e-02 3.4700e-02
1.8265e-02 4.3594e-02 -2.6302e-02
1.7826e-02 3.5585e-02 1.1340e-02
...
(125,125,.,.) =
9.7429e-03 -1.7253e-02 -1.6983e-04
-1.9886e-02 8.1994e-02 1.2903e-02
-2.3786e-02 -4.7812e-03 4.8584e-02
(125,126,.,.) =
-2.4373e-02 -2.5836e-02 -3.5317e-02
-2.9582e-02 -9.6624e-02 -5.3546e-02
-1.5009e-02 5.9241e-03 -1.9719e-02
(125,127,.,.) =
6.8366e-03 -3.6779e-02 -2.5541e-02
-1.1634e-02 -2.3650e-02 -7.8005e-03
8.6452e-03 7.8958e-03 -1.8926e-02
⋮
(126, 0 ,.,.) =
3.2894e-02 2.9690e-02 1.1071e-02
3.8989e-02 -8.9897e-03 2.2632e-02
7.8374e-03 -2.7959e-02 -2.3005e-02
(126, 1 ,.,.) =
1.3667e-02 2.2886e-02 -1.8989e-02
2.7104e-03 1.1235e-02 7.4223e-03
2.1089e-02 4.3557e-02 1.0752e-02
(126, 2 ,.,.) =
-2.3662e-02 2.2110e-02 4.3471e-04
-3.0925e-02 6.0868e-02 1.6691e-02
-8.8467e-02 -8.3442e-02 -3.4247e-02
...
(126,125,.,.) =
-7.3418e-03 -1.9690e-02 1.7969e-03
7.2727e-03 -3.4322e-02 -2.4270e-02
-1.1512e-02 -6.0470e-02 -5.5070e-02
(126,126,.,.) =
3.0219e-03 2.6285e-03 1.7110e-02
-1.3418e-02 -8.5859e-03 9.0284e-03
1.9504e-02 9.4355e-03 4.5180e-03
(126,127,.,.) =
1.3674e-03 7.6213e-04 1.1925e-02
-2.3910e-03 -1.0733e-02 1.2625e-02
-5.0613e-03 -5.7724e-03 -1.4643e-02
⋮
(127, 0 ,.,.) =
-7.4213e-03 1.1754e-02 -4.2728e-02
1.6309e-03 -4.5691e-02 -1.3976e-01
-6.5419e-03 -2.0547e-03 -4.8392e-02
(127, 1 ,.,.) =
7.5053e-03 5.2659e-02 3.8849e-02
-1.2484e-02 8.4685e-02 6.2233e-03
1.3136e-03 -1.9656e-02 -8.2167e-02
(127, 2 ,.,.) =
-2.4916e-02 1.6551e-02 1.6914e-02
8.6507e-03 2.1444e-02 1.1694e-02
-9.0502e-04 3.0596e-02 1.3600e-02
...
(127,125,.,.) =
-7.8114e-03 2.2029e-02 -1.7545e-02
-7.5889e-03 -2.1149e-02 -3.6984e-03
1.2622e-02 -2.0709e-02 -5.3862e-03
(127,126,.,.) =
3.0152e-02 -8.2268e-03 -6.4910e-02
-2.3752e-02 -9.5375e-02 -5.3019e-02
-1.6835e-02 -1.1071e-02 9.9055e-04
(127,127,.,.) =
-2.4533e-02 -8.4685e-02 2.5065e-02
1.0639e-02 3.8693e-02 1.4004e-01
1.5497e-02 -9.5081e-03 4.0948e-03
[torch.FloatTensor of size 128x128x3x3]
), (u'layer2.0.bn2.running_mean',
-0.4532
-0.1524
-0.3771
-0.0713
-0.2878
-0.1534
-0.5443
-0.1878
-0.2956
-0.0365
-0.0336
-0.1475
0.0068
-0.1377
-0.1417
-0.3703
-0.4644
-0.1248
0.4767
0.0385
-0.3257
-0.1539
-0.3181
-0.1037
0.0639
-0.2066
-0.1608
0.0557
0.1252
-0.3812
-0.2301
-0.1256
-0.2842
-0.0949
-0.3629
-0.1013
0.3173
-0.1264
-0.1660
-0.1444
-0.9475
-1.0992
-0.0125
-0.0690
0.1497
-0.3284
0.0886
-0.3466
-0.2630
-0.1583
-0.3092
-0.0368
-0.0169
-0.3435
-0.3742
-0.2488
-0.1078
-0.3069
-0.1010
0.1655
-0.4201
-0.2702
-0.1342
-0.0495
-0.4643
-0.2271
-0.4530
-0.0365
-0.4531
-0.0485
0.0403
-0.0798
-0.0885
-0.1399
-0.5255
0.0254
-0.1210
-0.2685
-0.1447
-0.1114
-0.5782
-0.3445
-0.0098
-0.8503
-0.0380
-0.2450
-0.0705
-0.1167
-0.1946
-0.3769
-0.5091
0.2355
-0.1791
-0.2465
-0.2035
-0.3560
0.0100
-0.2061
-0.0407
-0.6231
-0.0431
-0.2874
-0.3627
-0.1486
-0.0271
-0.3714
0.1313
-0.1827
-0.2294
-0.0660
-0.0431
-0.9597
0.0849
-0.0855
-0.3286
-0.9559
-0.1640
-0.0745
0.1040
-0.3808
-0.4664
0.0823
-0.2148
-0.3367
-0.0775
-0.1677
-0.0668
-0.1016
[torch.FloatTensor of size 128]
), (u'layer2.0.bn2.running_var',
0.0481
0.0571
0.0619
0.0319
0.0896
0.0538
0.0606
0.1026
0.0445
0.1045
0.0477
0.0751
0.0312
0.0500
0.0453
0.0511
0.0846
0.0792
0.1995
0.0590
0.0555
0.0877
0.0545
0.0825
0.0511
0.1046
0.0602
0.0467
0.0575
0.0667
0.0973
0.0930
0.0601
0.0702
0.0693
0.0347
0.1059
0.0404
0.0449
0.2404
0.1996
0.1850
0.0337
0.0491
0.0327
0.0976
0.0398
0.0999
0.0879
0.0753
0.0368
0.0639
0.1159
0.0487
0.1282
0.0614
0.0541
0.0333
0.0908
0.0726
0.0490
0.0751
0.0646
0.0694
0.1447
0.1111
0.1868
0.0648
0.0639
0.0538
0.0637
0.0589
0.0643
0.1066
0.1363
0.0845
0.0670
0.1007
0.0361
0.0741
0.0437
0.0776
0.0721
0.0685
0.0612
0.0608
0.0688
0.1067
0.0610
0.0797
0.0385
0.0575
0.0512
0.0672
0.0229
0.0898
0.0729
0.0448
0.0379
0.2440
0.0769
0.0878
0.0522
0.0541
0.0225
0.0741
0.1303
0.0576
0.0836
0.0499
0.0524
0.1636
0.0871
0.0577
0.0498
0.1113
0.0679
0.0683
0.0465
0.0505
0.1792
0.0842
0.0414
0.0971
0.0470
0.0575
0.0490
0.0455
[torch.FloatTensor of size 128]
), (u'layer2.0.bn2.weight', Parameter containing:
0.1454
0.3270
0.3113
0.2538
0.4086
0.3937
0.4400
0.3108
0.3406
0.2168
0.2170
0.3857
0.1971
0.2692
0.1663
0.2454
0.3232
0.3686
0.3893
0.3264
0.3875
0.4707
0.1958
0.4717
0.1673
0.3938
0.3044
0.1929
0.2175
0.2119
0.4230
0.3683
0.2455
0.2229
0.3370
0.3229
0.2688
0.3557
0.2581
0.4031
0.4492
0.3642
0.2599
0.1881
0.1359
0.2958
0.1913
0.3065
0.3981
0.4102
0.1874
0.4516
0.3340
0.1628
0.3599
0.1624
0.2886
0.1358
0.4491
0.2694
0.4823
0.3393
0.4764
0.3155
0.6005
0.4654
0.5264
0.2991
0.2992
0.4621
0.2614
0.4247
0.4662
0.4249
0.3345
0.2655
0.4048
0.3605
0.1782
0.3833
0.2823
0.3843
0.3307
0.2151
0.3317
0.1458
0.2771
0.4917
0.3199
0.4222
0.1559
0.4884
0.3267
0.3440
0.1608
0.4855
0.2677
0.1616
0.3221
0.4243
0.3661
0.1893
0.3400
0.3648
0.1779
0.3544
0.2852
0.2437
0.4472
0.3011
0.3997
0.6173
0.2794
0.4867
0.1502
0.6021
0.3604
0.4696
0.3711
0.2388
0.5347
0.1509
0.3213
0.4394
0.3229
0.4329
0.1489
0.3702
[torch.FloatTensor of size 128]
), (u'layer2.0.bn2.bias', Parameter containing:
0.0246
0.0593
0.1347
-0.1089
-0.0470
-0.1359
-0.0550
0.0509
-0.0613
0.0916
0.0031
-0.0274
-0.0539
0.0177
0.0432
0.0074
0.0548
-0.0321
-0.0224
0.0142
-0.2150
-0.1160
0.0486
-0.1141
0.1066
0.0355
0.0140
0.0177
0.0781
0.1331
0.0139
0.0447
0.1063
0.0528
-0.0539
-0.1160
0.1055
-0.1591
0.0100
0.1197
0.0170
0.0929
-0.0675
0.0987
0.1034
0.0501
0.0297
0.0281
-0.0075
-0.0577
-0.0144
-0.1640
0.1255
0.0817
0.0635
0.0936
0.0213
0.0486
-0.1174
0.0237
-0.2177
0.0099
-0.1883
0.0467
-0.0829
0.0585
-0.0306
0.0509
0.0541
-0.1671
0.0115
-0.0302
-0.1393
0.0115
0.0428
0.1189
-0.1289
0.0479
0.0474
-0.0625
0.0009
-0.0144
0.0909
0.1342
-0.0338
0.0560
0.0848
-0.0467
0.0228
-0.0097
0.1360
-0.2625
0.0088
-0.0553
0.0383
-0.0720
0.0907
0.1612
-0.1076
0.1011
-0.0519
0.0838
-0.0704
-0.0806
-0.0243
0.0533
0.1277
0.1403
-0.0593
-0.0639
-0.0766
-0.1163
0.0661
-0.1644
0.0422
-0.2786
-0.1006
-0.0696
-0.0761
0.0371
-0.0247
0.0916
-0.0200
-0.0176
0.0298
-0.0373
0.0466
-0.1371
[torch.FloatTensor of size 128]
), (u'layer2.0.downsample.0.weight', Parameter containing:
( 0 , 0 ,.,.) =
1.5916e-02
( 0 , 1 ,.,.) =
-3.1090e-01
( 0 , 2 ,.,.) =
1.2615e-02
...
( 0 ,61 ,.,.) =
-1.6723e-01
( 0 ,62 ,.,.) =
1.2692e-02
( 0 ,63 ,.,.) =
1.3152e-02
⋮
( 1 , 0 ,.,.) =
3.5526e-03
( 1 , 1 ,.,.) =
-1.0868e-03
( 1 , 2 ,.,.) =
-8.2883e-03
...
( 1 ,61 ,.,.) =
-2.3444e-02
( 1 ,62 ,.,.) =
-7.5592e-02
( 1 ,63 ,.,.) =
-1.2622e-02
⋮
( 2 , 0 ,.,.) =
-4.1898e-02
( 2 , 1 ,.,.) =
7.9478e-03
( 2 , 2 ,.,.) =
-1.6623e-01
...
( 2 ,61 ,.,.) =
3.1887e-02
( 2 ,62 ,.,.) =
-1.8766e-02
( 2 ,63 ,.,.) =
6.4507e-02
...
⋮
(125, 0 ,.,.) =
-2.8725e-02
(125, 1 ,.,.) =
4.7026e-02
(125, 2 ,.,.) =
-5.2251e-02
...
(125,61 ,.,.) =
-4.7365e-02
(125,62 ,.,.) =
5.8639e-02
(125,63 ,.,.) =
5.8808e-02
⋮
(126, 0 ,.,.) =
-7.7884e-03
(126, 1 ,.,.) =
-2.0288e-02
(126, 2 ,.,.) =
5.6392e-02
...
(126,61 ,.,.) =
7.8023e-01
(126,62 ,.,.) =
-2.2917e-03
(126,63 ,.,.) =
-2.5941e-02
⋮
(127, 0 ,.,.) =
-2.8316e-02
(127, 1 ,.,.) =
-1.3194e-02
(127, 2 ,.,.) =
-5.1356e-02
...
(127,61 ,.,.) =
2.3552e-02
(127,62 ,.,.) =
-6.7667e-02
(127,63 ,.,.) =
2.6754e-02
[torch.FloatTensor of size 128x64x1x1]
), (u'layer2.0.downsample.1.running_mean',
-0.2113
0.1359
0.0039
0.0886
-0.0546
-0.2716
0.2521
-0.2035
0.0303
-0.1464
-0.2640
-0.4436
-0.3815
-0.1463
0.0573
-0.2120
-0.0665
0.2438
0.0832
0.0040
-0.2136
-0.1755
-0.7201
-0.2233
0.1047
0.1467
-0.3165
-0.2010
0.2569
-0.8141
-0.0867
-0.0875
-0.9794
-0.2197
-0.0568
-0.3848
0.2579
0.1735
-0.0528
0.3276
-0.4380
0.1895
-0.1316
-0.3101
-0.2862
-0.0167
-0.2216
-0.1930
0.0454
-0.3049
0.1863
-0.5461
0.0461
0.1899
-0.0353
-0.2415
0.0813
0.4788
0.0519
0.0438
0.1379
-0.4036
-0.1231
0.0551
-0.0663
0.1699
-0.3095
-0.1080
-0.1431
0.2339
-0.2893
0.3513
0.1893
-0.0789
-0.5882
-0.1365
-0.2919
0.2869
0.3085
-0.1096
0.3905
-0.2630
-0.2150
-0.1966
-0.2579
-0.0904
0.0506
-0.0275
0.4067
0.0970
-0.3976
0.2176
0.2585
0.1078
-0.2607
-0.1126
-0.2001
-0.4400
-0.1181
0.2168
-0.1290
-0.1434
0.2498
-0.2811
-0.2768
-0.5209
0.1785
0.1161
-0.1806
-0.1448
-0.0704
-0.3591
-0.4581
-0.1117
-0.1916
0.7261
-0.2382
0.0126
0.0749
-0.0097
0.0480
0.9940
0.0634
0.0629
-0.7954
-0.1612
1.3040
-0.2879
[torch.FloatTensor of size 128]
), (u'layer2.0.downsample.1.running_var',
0.1951
0.0151
0.0247
0.0691
0.0665
0.0386
0.0292
0.1873
0.0476
0.0859
0.1065
0.0916
0.1233
0.0595
0.1220
0.0878
0.0620
0.0835
0.1198
0.0264
0.1417
0.0151
0.0808
0.0223
0.1227
0.0093
0.1094
0.1057
0.1190
0.1483
0.0764
0.0185
0.0642
0.2118
0.1243
0.0555
0.0427
0.0556
0.1126
0.0959
0.0943
0.1135
0.0661
0.0704
0.1229
0.1406
0.0859
0.0672
0.0138
0.1057
0.1114
0.0589
0.0269
0.0969
0.0489
0.1290
0.0768
0.0935
0.0215
0.1296
0.0122
0.0591
0.0583
0.0216
0.0135
0.0106
0.0342
0.0199
0.0566
0.0168
0.0640
0.0537
0.0322
0.0318
0.0584
0.0361
0.0155
0.0159
0.0949
0.0965
0.0927
0.0331
0.0240
0.1121
0.0693
0.2177
0.0251
0.0650
0.0345
0.0357
0.1534
0.0568
0.0370
0.0442
0.0752
0.0413
0.0251
0.0582
0.0370
0.1190
0.0993
0.2644
0.0537
0.0495
0.1122
0.0638
0.0302
0.0376
0.0187
0.0634
0.0307
0.0378
0.1793
0.0240
0.2015
0.0337
0.1444
0.0368
0.0165
0.0710
0.0133
0.2638
0.0288
0.0057
0.0462
0.0291
0.1198
0.0450
[torch.FloatTensor of size 128]
), (u'layer2.0.downsample.1.weight', Parameter containing:
0.3334
0.0581
0.0715
0.3442
0.1756
0.1509
0.1568
0.3100
0.1927
0.1516
0.3044
0.2238
0.3706
0.1739
0.3051
0.2610
0.1575
0.2015
0.2933
0.1010
0.5871
0.0676
0.2499
0.0929
0.2443
0.0495
0.2449
0.2750
0.3071
0.3025
0.1818
0.0688
0.2223
0.3766
0.4661
0.3284
0.1035
0.3400
0.2325
0.1514
0.1753
0.2269
0.2606
0.1831
0.2894
0.2590
0.2208
0.1399
0.0643
0.2833
0.3451
0.2017
0.0696
0.2722
0.1127
0.2917
0.2358
0.2703
0.0911
0.2591
0.1302
0.2261
0.1967
0.0539
0.0697
0.0524
0.1050
0.0861
0.1173
0.0957
0.1862
0.1642
0.1336
0.1065
0.1312
0.0888
0.0793
0.0475
0.3049
0.2325
0.2908
0.1292
0.0778
0.2263
0.2379
0.3405
0.0914
0.1936
0.1223
0.1400
0.2953
0.2360
0.1681
0.1338
0.2666
0.1495
0.0761
0.1674
0.1784
0.1720
0.2318
0.3753
0.2103
0.1922
0.4002
0.1718
0.0593
0.0742
0.0686
0.1931
0.1386
0.1111
0.3055
0.1205
0.3443
0.1633
0.3673
0.1534
0.0742
0.2088
0.0394
0.2594
0.1385
-0.0051
0.1905
0.1275
0.3071
0.1682
[torch.FloatTensor of size 128]
), (u'layer2.0.downsample.1.bias', Parameter containing:
0.0246
0.0593
0.1347
-0.1089
-0.0470
-0.1359
-0.0550
0.0509
-0.0613
0.0916
0.0031
-0.0274
-0.0539
0.0177
0.0432
0.0074
0.0548
-0.0321
-0.0224
0.0142
-0.2150
-0.1160
0.0486
-0.1141
0.1066
0.0355
0.0140
0.0177
0.0781
0.1331
0.0139
0.0447
0.1063
0.0528
-0.0539
-0.1160
0.1055
-0.1591
0.0100
0.1197
0.0170
0.0929
-0.0675
0.0987
0.1034
0.0501
0.0297
0.0281
-0.0075
-0.0577
-0.0144
-0.1640
0.1255
0.0817
0.0635
0.0936
0.0213
0.0486
-0.1174
0.0237
-0.2177
0.0099
-0.1883
0.0467
-0.0829
0.0585
-0.0306
0.0509
0.0541
-0.1671
0.0115
-0.0302
-0.1393
0.0115
0.0428
0.1189
-0.1289
0.0479
0.0474
-0.0625
0.0009
-0.0144
0.0909
0.1342
-0.0338
0.0560
0.0848
-0.0467
0.0228
-0.0097
0.1360
-0.2625
0.0088
-0.0553
0.0383
-0.0720
0.0907
0.1612
-0.1076
0.1011
-0.0519
0.0838
-0.0704
-0.0806
-0.0243
0.0533
0.1277
0.1403
-0.0593
-0.0639
-0.0766
-0.1163
0.0661
-0.1644
0.0422
-0.2786
-0.1006
-0.0696
-0.0761
0.0371
-0.0247
0.0916
-0.0200
-0.0176
0.0298
-0.0373
0.0466
-0.1371
[torch.FloatTensor of size 128]
), (u'layer2.1.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
-9.9023e-04 -7.7429e-03 -7.9740e-03
2.4844e-02 1.8642e-03 5.8352e-03
9.5089e-03 -1.6476e-02 3.9157e-03
( 0 , 1 ,.,.) =
-2.1488e-02 -1.2330e-03 -1.4281e-02
-1.7044e-02 9.5922e-03 7.0445e-03
1.0790e-02 -7.2350e-03 -1.1357e-02
( 0 , 2 ,.,.) =
-1.1126e-03 3.0388e-02 2.2247e-02
-6.1184e-02 -2.3797e-02 2.3747e-03
4.0678e-02 -1.0356e-01 -6.0011e-02
...
( 0 ,125,.,.) =
-8.5833e-03 1.1438e-02 2.0800e-02
-1.6565e-02 -3.9587e-02 1.2594e-02
-1.4314e-03 -5.4257e-03 3.6794e-02
( 0 ,126,.,.) =
-1.3687e-02 -2.9514e-02 -1.4745e-02
2.8299e-02 2.2096e-02 3.4839e-03
-4.3521e-03 -2.6706e-03 1.2258e-04
( 0 ,127,.,.) =
7.6403e-03 2.0666e-02 3.7429e-02
6.9478e-03 4.3983e-02 1.7538e-02
-9.7797e-03 -2.4789e-02 -1.1349e-03
⋮
( 1 , 0 ,.,.) =
8.4439e-02 8.4827e-02 -5.1478e-02
3.5253e-02 -1.1375e-03 -1.0331e-01
-6.4078e-02 -1.2660e-01 -1.2952e-01
( 1 , 1 ,.,.) =
1.0628e-03 -1.4083e-02 4.7109e-03
-2.1059e-02 -2.8778e-02 9.9708e-03
1.4074e-02 1.8691e-02 5.8192e-02
( 1 , 2 ,.,.) =
2.2139e-02 8.9027e-03 1.4790e-02
-1.7497e-02 -5.3924e-03 2.7834e-02
-1.3855e-02 -1.3346e-02 1.7668e-02
...
( 1 ,125,.,.) =
-3.8032e-02 -2.3097e-02 -7.1775e-03
-3.5089e-02 1.0861e-02 1.3640e-02
6.3449e-04 9.7476e-03 7.3670e-03
( 1 ,126,.,.) =
-4.4184e-02 -1.6190e-02 1.2243e-02
-4.0349e-02 -1.7894e-02 2.8911e-02
-6.5176e-03 -1.0490e-02 9.1658e-03
( 1 ,127,.,.) =
4.3621e-03 1.3119e-02 1.8442e-03
1.1555e-02 -1.3031e-02 -9.5657e-03
-2.3314e-02 1.1609e-03 2.6771e-03
⋮
( 2 , 0 ,.,.) =
-2.1180e-02 -6.2213e-03 1.7609e-03
-4.7424e-03 1.1101e-02 1.1296e-02
-1.4529e-02 2.9843e-02 2.4383e-03
( 2 , 1 ,.,.) =
6.9183e-03 9.2937e-03 3.0078e-02
-4.2612e-03 4.9560e-03 -4.7338e-03
3.1360e-02 1.9035e-03 -4.7242e-03
( 2 , 2 ,.,.) =
-3.6726e-02 5.7285e-03 1.3919e-01
-4.2992e-02 9.4023e-04 7.7141e-02
-5.0050e-02 -4.9479e-03 2.4693e-02
...
( 2 ,125,.,.) =
3.7203e-02 7.4712e-03 -4.2659e-02
-8.1729e-03 -9.2536e-02 -5.4934e-03
-2.5927e-02 8.3993e-04 7.4632e-02
( 2 ,126,.,.) =
1.8076e-02 4.5272e-03 -1.3757e-02
-1.8939e-02 -3.2739e-02 -2.9666e-02
-2.0608e-02 -4.6167e-03 1.3080e-03
( 2 ,127,.,.) =
-1.2078e-02 -2.0285e-03 -1.6998e-02
-3.4805e-02 -4.9195e-02 -3.1973e-02
-2.1021e-02 -5.1164e-03 -4.8522e-03
...
⋮
(125, 0 ,.,.) =
3.1791e-02 2.2948e-02 1.0390e-02
-1.2628e-02 -2.9320e-03 4.2645e-03
-2.1707e-02 -1.0856e-02 1.6094e-02
(125, 1 ,.,.) =
-1.4525e-03 -1.0131e-02 -4.6862e-04
2.2130e-02 2.2736e-02 5.0183e-03
-6.0125e-02 -4.3150e-02 -4.4480e-02
(125, 2 ,.,.) =
3.0761e-03 3.4396e-03 6.0877e-03
-1.3683e-02 4.0576e-03 -2.6544e-02
6.8231e-02 6.3474e-02 -9.3660e-03
...
(125,125,.,.) =
1.8752e-02 1.9400e-02 4.1691e-02
8.7770e-03 8.2394e-04 1.8619e-02
1.8796e-02 6.2238e-02 -2.3801e-02
(125,126,.,.) =
-2.9788e-02 -3.4598e-02 -2.5225e-02
8.4234e-03 -2.3222e-02 -9.4612e-03
6.9035e-03 6.9737e-02 -1.3359e-02
(125,127,.,.) =
2.6981e-03 -4.3182e-02 -1.6731e-02
2.5812e-02 -7.2025e-02 -6.5399e-02
4.6257e-02 2.9469e-02 -1.5811e-02
⋮
(126, 0 ,.,.) =
-2.1079e-02 3.8220e-02 8.3305e-03
-5.9912e-03 3.5584e-02 -1.7534e-03
1.8735e-02 7.0859e-03 -3.5151e-03
(126, 1 ,.,.) =
-4.5937e-02 -7.4695e-02 -5.3608e-02
-8.6266e-03 9.0894e-03 -3.0345e-02
-2.8158e-02 -2.1204e-02 -8.4730e-03
(126, 2 ,.,.) =
-7.1772e-02 -6.8582e-02 2.5544e-02
5.0363e-02 2.5269e-02 5.6668e-02
2.6238e-03 1.3871e-03 -8.4692e-03
...
(126,125,.,.) =
-2.9644e-02 1.0896e-02 -3.0402e-02
1.5095e-03 5.0455e-02 1.5597e-02
-2.1015e-02 -1.0757e-02 -3.4942e-02
(126,126,.,.) =
-2.7573e-02 2.9707e-02 -2.9490e-02
2.3301e-03 -3.9011e-02 6.8010e-03
4.4006e-02 3.5397e-02 7.9087e-02
(126,127,.,.) =
-2.7480e-02 5.0337e-02 1.4290e-02
-5.2482e-02 -4.7748e-03 1.2988e-02
-1.8935e-02 -3.0808e-02 -1.7583e-02
⋮
(127, 0 ,.,.) =
3.2280e-02 4.7408e-02 3.4054e-02
2.1445e-02 3.8987e-03 4.6985e-04
1.5159e-02 8.2067e-03 3.2426e-02
(127, 1 ,.,.) =
9.2653e-03 2.3661e-02 4.2089e-02
2.1976e-02 4.6128e-02 1.1402e-02
7.2843e-03 5.2285e-02 8.6340e-03
(127, 2 ,.,.) =
1.4022e-02 1.2800e-02 3.5398e-02
-4.4398e-02 1.7399e-02 -1.5838e-02
3.1712e-02 5.8679e-02 -9.3244e-03
...
(127,125,.,.) =
-4.8399e-03 7.8628e-03 -5.6169e-04
8.0402e-03 1.7392e-02 7.8734e-03
-1.7713e-02 -4.5957e-02 -9.8762e-03
(127,126,.,.) =
-9.7569e-03 -7.5795e-03 -2.4627e-02
-8.2454e-03 6.3065e-02 -3.2954e-03
-7.7549e-03 -1.3404e-04 -8.1337e-03
(127,127,.,.) =
1.7664e-02 1.0114e-02 4.2687e-03
-3.7950e-03 2.6715e-02 2.0121e-02
1.6868e-02 -6.6515e-03 -1.1107e-02
[torch.FloatTensor of size 128x128x3x3]
), (u'layer2.1.bn1.running_mean',
-0.3593
-0.4772
0.2329
-0.7139
-0.6713
-0.5552
-0.4556
-0.6502
-0.2082
-0.4011
-0.3942
-0.2970
-0.1626
-0.4379
-0.3334
-0.6163
-0.2982
-0.5190
0.1676
-0.1832
-0.2080
-0.5296
-0.4245
-0.1755
-0.8556
-0.3067
-0.4560
-0.1642
-0.5059
-0.4529
-0.4532
-0.7254
0.6037
-0.2509
-0.0199
-0.4672
-0.5901
-0.4195
-0.3272
0.5658
-0.3438
-0.5992
-0.2683
-0.4591
-0.3460
-0.1669
-0.3271
0.0351
-0.4175
-0.3984
-0.4118
-0.3619
-0.1313
-0.2758
-0.7196
-0.5401
-1.1739
-0.0497
-0.1358
-0.6139
-0.5143
-0.3017
-0.0465
-0.3977
-0.0251
0.3821
-0.5079
-0.2795
-0.1904
0.3993
-0.4418
-0.1813
-0.6122
-0.3132
-0.0656
-0.4458
0.0894
-0.3759
0.0440
-0.3972
-0.2860
0.0877
-0.0825
-0.7620
-0.0260
-0.3861
-0.1128
-0.4129
-0.2883
1.1054
-0.3892
-0.0393
-0.1394
-0.1678
0.1825
-0.4379
-0.2522
-0.1119
-0.5098
-0.0328
-0.2874
-0.3809
-0.1929
-0.3355
-0.3863
-0.1617
-0.2289
0.1665
-0.6874
-0.1705
-0.5216
-0.3315
-0.6678
0.5342
0.1433
-0.5558
-0.4277
-0.3240
-0.2142
0.0216
-0.4379
-0.8486
-0.7675
-0.4512
0.2788
-0.9694
-1.1691
0.0198
[torch.FloatTensor of size 128]
), (u'layer2.1.bn1.running_var',
0.2660
0.1422
0.2404
0.4297
0.1306
0.3336
0.1939
0.1600
0.2166
0.4070
0.1029
0.3442
0.2021
0.1590
0.2226
0.1842
0.2731
0.2262
0.2178
0.1536
0.1722
0.2035
0.3391
0.1572
0.2276
0.2740
0.1543
0.1307
0.1649
0.2571
0.1431
0.2349
0.1765
0.1171
0.3401
0.1657
0.1307
0.3169
0.1973
0.1504
0.3181
0.2529
0.2980
0.2461
0.2857
0.2814
0.1889
0.1125
0.2079
0.2131
0.2158
0.3372
0.2791
0.2852
0.5102
0.1808
0.2540
0.3876
0.2048
0.1716
0.2775
0.2385
0.1992
0.3325
0.1832
0.1246
0.1852
0.2083
0.3179
0.3077
0.1842
0.1845
0.1684
0.2447
0.2990
0.2412
0.3370
0.1974
0.1679
0.2459
0.1670
0.1764
0.2258
0.3743
0.1464
0.1706
0.2925
0.2594
0.2123
0.2191
0.2281
0.1809
0.1278
0.2575
0.3387
0.1755
0.3083
0.1399
0.2197
0.1594
0.1311
0.2250
0.3422
0.2391
0.1240
0.2068
0.2784
0.1800
0.3133
0.1167
0.3066
0.1008
0.1729
0.3045
0.2187
0.2862
0.2361
0.1560
0.1271
0.2467
0.2201
0.1423
0.1531
0.2995
0.2069
0.2126
0.1369
0.1566
[torch.FloatTensor of size 128]
), (u'layer2.1.bn1.weight', Parameter containing:
0.3323
0.2908
0.3246
0.3435
0.3011
0.3054
0.3041
0.3539
0.2862
0.3601
0.2970
0.3381
0.2565
0.3276
0.3030
0.4085
0.3519
0.4218
0.3055
0.2551
0.3425
0.3215
0.3366
0.2700
0.2849
0.3954
0.3166
0.3286
0.3515
0.3953
0.2768
0.3625
0.1988
0.2717
0.3355
0.2797
0.2510
0.3832
0.3266
0.3263
0.3681
0.3401
0.3651
0.3391
0.3071
0.3231
0.3691
0.2410
0.3536
0.3189
0.3238
0.3611
0.3086
0.3309
0.3886
0.4362
0.4550
0.2962
0.3071
0.3386
0.3317
0.3228
0.2393
0.3147
0.2738
0.3218
0.3198
0.3411
0.3611
0.2833
0.3035
0.3183
0.3146
0.3890
0.2607
0.3479
0.3236
0.3709
0.2592
0.3742
0.2555
0.2966
0.3505
0.3165
0.2808
0.2660
0.2817
0.4795
0.3372
0.2723
0.2955
0.3225
0.2470
0.3160
0.3515
0.3131
0.3372
0.2837
0.3540
0.2897
0.2490
0.3019
0.3114
0.3510
0.3022
0.3617
0.2859
0.2831
0.3243
0.2769
0.3314
0.2394
0.2932
0.2788
0.2686
0.3194
0.3542
0.2683
0.2955
0.2924
0.3538
0.4256
0.3603
0.3013
0.2763
0.4354
0.3991
0.2694
[torch.FloatTensor of size 128]
), (u'layer2.1.bn1.bias', Parameter containing:
-0.1735
-0.2337
-0.3383
-0.0806
-0.1920
-0.0621
-0.1885
-0.2830
-0.1680
-0.1796
-0.2645
-0.1983
-0.1183
-0.2432
-0.1706
-0.3090
-0.2661
-0.4040
-0.1949
-0.1392
-0.2449
-0.1242
-0.2012
-0.1901
-0.1014
-0.3468
-0.2245
-0.3272
-0.3057
-0.3289
-0.1532
-0.1967
-0.0667
-0.3281
-0.1418
-0.1527
-0.0987
-0.3243
-0.2252
-0.3462
-0.2284
-0.2263
-0.1810
-0.1564
-0.1730
-0.1507
-0.2913
-0.1643
-0.1998
-0.1532
-0.2211
-0.2247
-0.0913
-0.1563
-0.2453
-0.4854
-0.4428
-0.1021
-0.1615
-0.2125
-0.2239
-0.1952
-0.0447
-0.1733
-0.1178
-0.4775
-0.2110
-0.2305
-0.1795
-0.1582
-0.2008
-0.2041
-0.1974
-0.2750
-0.0395
-0.2161
-0.2786
-0.2626
-0.0997
-0.2953
-0.1431
-0.1448
-0.1894
-0.1283
-0.1807
-0.1144
-0.1308
-0.4154
-0.2324
-0.1376
-0.1154
-0.2099
-0.0966
-0.1669
-0.3835
-0.2545
-0.1603
-0.1904
-0.2420
-0.1658
-0.1133
-0.1498
-0.1213
-0.2318
-0.2017
-0.3827
-0.1491
-0.1174
-0.1261
-0.2031
-0.1832
-0.2274
-0.1281
-0.2557
-0.1400
-0.0723
-0.2212
-0.1486
-0.2914
-0.1116
-0.2194
-0.4898
-0.3693
-0.1437
-0.1232
-0.3723
-0.6794
-0.1536
[torch.FloatTensor of size 128]
), (u'layer2.1.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
-1.6153e-02 5.0134e-03 -9.0186e-04
-8.8386e-03 -1.9390e-02 -2.4174e-02
6.3052e-03 1.0245e-02 -1.3816e-02
( 0 , 1 ,.,.) =
-1.0979e-02 2.6164e-03 2.3656e-02
-1.7687e-02 1.9861e-02 6.4150e-02
6.0224e-03 7.6342e-02 1.0215e-01
( 0 , 2 ,.,.) =
-8.1113e-03 6.8414e-03 2.5436e-02
-8.0696e-03 9.2929e-03 8.2899e-03
7.7306e-03 1.2159e-02 7.1625e-03
...
( 0 ,125,.,.) =
1.5175e-02 6.2196e-03 2.1798e-02
-1.5199e-02 -8.5439e-02 -2.4713e-02
-1.8460e-02 -4.9767e-02 -1.6818e-03
( 0 ,126,.,.) =
3.0728e-02 3.9962e-02 3.1253e-02
-1.8738e-02 -6.7510e-02 -2.7649e-02
2.8429e-02 3.1854e-02 1.0543e-02
( 0 ,127,.,.) =
-1.8320e-02 -1.5854e-02 -1.0685e-02
-2.7442e-02 -3.0616e-02 -1.0485e-02
-1.5122e-02 -1.0595e-02 -2.5322e-02
⋮
( 1 , 0 ,.,.) =
3.6868e-03 3.0996e-02 4.2763e-02
4.6537e-02 4.8606e-02 2.3800e-03
1.6654e-02 1.2900e-02 -1.8230e-02
( 1 , 1 ,.,.) =
-1.0441e-02 -1.5934e-03 -1.6128e-02
-1.2799e-02 4.9570e-03 -1.4585e-02
-2.3553e-02 -3.7023e-03 -1.4399e-02
( 1 , 2 ,.,.) =
1.0338e-02 -1.7560e-02 -3.3046e-02
-3.2090e-02 -5.9258e-03 2.0201e-03
-4.1428e-02 4.9121e-03 1.6906e-02
...
( 1 ,125,.,.) =
-4.9525e-02 -4.6498e-02 -5.9916e-02
-2.6670e-02 -1.9079e-02 -2.9419e-02
-3.9683e-03 1.9405e-02 7.3317e-03
( 1 ,126,.,.) =
1.4293e-02 1.5643e-02 5.8117e-04
5.1493e-03 7.4332e-03 -3.6928e-03
-1.3522e-02 -8.5536e-03 -2.1259e-03
( 1 ,127,.,.) =
-3.0908e-02 -1.9839e-02 -1.9375e-02
-1.0368e-02 -2.4294e-02 2.4103e-04
-1.9275e-02 -2.9707e-02 -1.5623e-02
⋮
( 2 , 0 ,.,.) =
-4.9212e-02 -2.9588e-02 8.8023e-02
4.7453e-03 4.3564e-02 9.3115e-02
7.4083e-02 4.2868e-02 -5.1033e-02
( 2 , 1 ,.,.) =
6.6992e-03 2.1676e-02 -5.4254e-04
1.9286e-02 1.0920e-02 -4.5440e-03
3.1075e-02 -1.7168e-03 -2.7603e-02
( 2 , 2 ,.,.) =
6.0096e-02 -2.9359e-02 -5.8911e-02
-1.9133e-02 -8.1624e-02 -2.2553e-02
1.1597e-02 2.5092e-02 1.2130e-02
...
( 2 ,125,.,.) =
5.4307e-03 -2.3130e-02 9.6233e-03
-4.3785e-02 -2.6735e-02 2.1993e-02
-3.5919e-02 -4.1009e-02 -2.1860e-02
( 2 ,126,.,.) =
3.3705e-02 6.2938e-02 4.3502e-02
1.1111e-03 1.9243e-02 -1.9707e-03
-1.1493e-02 -5.3445e-02 -9.6676e-03
( 2 ,127,.,.) =
-2.6664e-03 -2.6954e-02 -1.7667e-02
-8.3382e-03 8.9920e-03 8.1260e-04
-2.6832e-02 -3.5991e-02 -4.2495e-02
...
⋮
(125, 0 ,.,.) =
-1.8876e-03 -2.2728e-02 -4.2991e-03
-9.2231e-03 -3.4333e-02 -1.3392e-02
-1.2774e-02 -1.1435e-02 1.5617e-02
(125, 1 ,.,.) =
1.0703e-02 1.2792e-02 2.2662e-02
7.3185e-03 -1.7847e-02 1.0674e-02
-1.5936e-02 -1.9318e-02 2.1768e-02
(125, 2 ,.,.) =
-7.3009e-03 3.0234e-02 -1.1899e-02
-2.6099e-02 3.7452e-03 3.2776e-02
-3.3101e-02 -7.1923e-03 1.6559e-02
...
(125,125,.,.) =
-3.2818e-02 -1.0021e-01 -4.7012e-02
2.8293e-03 4.1410e-02 -1.1391e-02
-1.1152e-02 -5.5861e-03 1.9968e-02
(125,126,.,.) =
-2.3932e-02 -3.0687e-02 -1.1756e-03
1.5311e-03 -3.5002e-02 -2.4414e-02
-8.7575e-03 -7.7842e-02 -3.8842e-02
(125,127,.,.) =
2.6107e-02 1.5406e-02 1.7569e-02
-1.5130e-02 -4.8687e-03 3.0773e-03
-1.3470e-02 -9.3201e-03 -4.8982e-03
⋮
(126, 0 ,.,.) =
-2.0228e-02 -3.0006e-02 -9.8419e-03
-3.8676e-02 -3.3481e-02 -7.4265e-03
-2.8935e-02 -3.2037e-02 2.9245e-03
(126, 1 ,.,.) =
-1.2900e-02 3.8046e-03 1.5940e-02
-2.4030e-02 2.0666e-03 5.7250e-03
6.9989e-03 1.2192e-02 1.5406e-02
(126, 2 ,.,.) =
-1.5018e-02 -9.0988e-03 2.4450e-02
1.0039e-02 1.2561e-02 2.6997e-02
2.9556e-02 1.9463e-02 -2.6584e-03
...
(126,125,.,.) =
-1.8481e-02 3.9417e-04 9.9768e-03
-4.5447e-03 1.2307e-02 3.5507e-02
-1.1873e-03 -2.6185e-03 1.1547e-02
(126,126,.,.) =
4.6292e-03 -1.3690e-02 -1.0171e-02
1.2104e-02 1.6793e-02 1.3003e-02
1.3328e-03 3.4701e-03 1.7323e-02
(126,127,.,.) =
-8.7332e-05 5.8646e-03 -3.5117e-03
3.8112e-03 -7.1828e-03 -1.1407e-02
1.9705e-02 2.0556e-02 5.7084e-03
⋮
(127, 0 ,.,.) =
3.6998e-02 3.2616e-02 -9.4535e-04
-2.9484e-02 -2.3441e-02 -2.8085e-02
-2.5451e-02 3.9048e-02 3.6686e-02
(127, 1 ,.,.) =
-1.8732e-02 -1.5352e-02 1.1149e-02
-2.1324e-03 -2.3177e-02 1.7628e-02
-4.0012e-03 1.5463e-02 9.2496e-03
(127, 2 ,.,.) =
-2.9346e-02 7.7071e-03 -5.6520e-03
-2.3611e-02 -1.9390e-03 2.0221e-02
8.0955e-03 -2.3268e-02 -2.8827e-02
...
(127,125,.,.) =
-3.3532e-02 -2.9092e-02 -4.0045e-02
2.6530e-03 -2.0568e-02 1.3075e-02
1.6061e-02 -5.5725e-02 -4.9167e-02
(127,126,.,.) =
-7.9132e-03 2.1466e-02 2.0913e-02
-1.7259e-02 -2.5851e-02 2.7177e-03
-4.6532e-02 -2.4846e-02 -1.9911e-02
(127,127,.,.) =
-5.0350e-02 -2.5574e-02 1.7763e-02
-3.4474e-02 5.5247e-03 -2.7754e-02
-2.0743e-02 -2.2332e-02 -4.3512e-02
[torch.FloatTensor of size 128x128x3x3]
), (u'layer2.1.bn2.running_mean',
-0.0303
0.0327
0.0240
-0.0763
-0.1589
-0.0804
-0.1797
-0.0701
-0.1573
0.1134
-0.0805
-0.0234
-0.0756
-0.1833
0.0384
0.0791
-0.0594
-0.0217
0.0288
-0.1023
-0.0698
-0.0484
0.1234
-0.1242
0.0584
-0.1045
-0.0082
-0.0536
0.0127
0.0269
-0.1785
-0.0514
-0.0503
0.0173
0.0162
-0.2532
-0.2817
-0.2388
-0.0641
0.0136
0.1397
-0.2827
0.0767
-0.0328
-0.0080
-0.0058
-0.1322
-0.0266
-0.3995
-0.0825
-0.1061
-0.0556
-0.0557
0.0552
-0.1259
-0.0077
-0.1017
-0.0532
-0.1570
0.0675
-0.5579
0.0523
-0.1109
0.0096
0.0103
-0.0968
-0.0100
-0.2631
-0.1013
-0.0156
-0.0544
-0.1436
-0.0747
-0.0609
-0.0710
-0.1171
0.0205
0.0372
-0.0162
-0.0245
0.1684
-0.2868
0.0558
0.0402
-0.1360
-0.0523
-0.0547
-0.1108
-0.2490
-0.0252
0.0783
-0.1927
-0.1427
-0.1122
-0.0599
-0.0884
-0.0191
0.0015
-0.5522
0.0042
0.0305
0.0242
-0.1246
-0.1543
0.0045
-0.1808
-0.2224
0.0909
0.0329
0.5456
-0.0230
0.0628
0.0464
-0.0874
-0.0300
0.1108
-0.0492
-0.0331
-0.2471
-0.0352
0.0516
0.0709
-0.2409
-0.0650
-0.1684
-0.0565
-0.1306
-0.0627
[torch.FloatTensor of size 128]
), (u'layer2.1.bn2.running_var',
1.00000e-02 *
1.8228
2.1225
2.8612
2.0886
2.9474
4.7744
3.5290
2.8429
2.8470
4.5069
2.9840
1.9491
1.7088
2.9840
2.8539
2.8518
2.0139
2.2774
3.2135
3.3348
1.7759
4.7420
3.0149
4.1645
3.7812
4.6252
2.9589
1.6504
2.6924
3.0834
3.6263
4.5937
3.1620
2.6538
2.2904
5.8237
5.6463
4.0456
2.6616
3.2348
3.3927
8.4368
2.4933
1.7348
2.2366
2.1032
1.9272
1.5102
5.6145
3.9999
1.8257
4.0509
3.2031
2.2098
3.3824
2.2704
2.5319
1.8465
6.8178
2.6885
7.2726
2.1805
4.9063
2.6663
2.3015
1.3440
4.7817
5.8346
3.3150
4.7472
1.8629
3.3559
4.5253
3.1564
3.6324
3.4589
4.7584
3.2355
1.7391
3.5121
1.8529
5.3177
1.3671
3.0469
3.7829
1.6996
4.1624
3.1600
3.2903
1.6922
3.2056
4.3576
3.5142
2.4761
1.6919
3.8553
3.6356
1.7814
6.2490
4.0622
2.5852
2.2963
2.7265
2.9650
2.0724
4.4788
5.7808
2.0073
3.9706
5.8224
4.3781
3.6008
2.6018
3.5214
1.9792
3.2273
4.9339
1.6944
6.2593
2.9896
2.5511
1.5677
3.6686
1.5467
3.1936
2.8402
2.8767
4.4939
[torch.FloatTensor of size 128]
), (u'layer2.1.bn2.weight', Parameter containing:
0.1194
0.1625
0.3084
0.2931
0.2957
0.5263
0.4038
0.2024
0.3401
0.1982
0.2559
0.2311
0.1630
0.2891
0.2248
0.2311
0.2417
0.2187
0.1922
0.3103
0.2015
0.4802
0.2481
0.3898
0.3204
0.4035
0.2617
0.1551
0.2256
0.2117
0.2708
0.3537
0.2505
0.1843
0.2465
0.6501
0.3898
0.4289
0.1799
0.1604
0.1775
0.3600
0.2694
0.1283
0.1662
0.1716
0.1837
0.1710
0.4178
0.3249
0.1759
0.4717
0.4115
0.1995
0.2025
0.1492
0.2860
0.1072
0.3649
0.1906
0.5369
0.2400
0.4411
0.1702
0.1993
0.2045
0.1972
0.4041
0.3034
0.6168
0.2284
0.3228
0.4547
0.4370
0.1570
0.4057
0.5791
0.2338
0.1586
0.3130
0.2201
0.3195
0.1166
0.2517
0.2184
0.0989
0.3116
0.2613
0.3277
0.1778
0.2718
0.4174
0.5140
0.2136
0.1905
0.2898
0.2472
0.1341
0.6212
0.1810
0.2394
0.1417
0.1759
0.2827
0.1987
0.3775
0.3749
0.1274
0.3656
0.4305
0.4212
0.2673
0.2016
0.5098
0.1449
0.4408
0.3583
0.2503
0.5682
0.2518
0.1392
0.0617
0.3406
0.1313
0.4586
0.2914
0.1326
0.3915
[torch.FloatTensor of size 128]
), (u'layer2.1.bn2.bias', Parameter containing:
-0.1403
-0.0889
-0.4147
-0.2264
-0.0737
-0.3534
-0.3379
-0.0752
-0.1791
0.0448
-0.2842
-0.1765
-0.1591
-0.0675
-0.1543
-0.1061
-0.2334
-0.0981
-0.0908
-0.0567
-0.1908
-0.2055
-0.2704
-0.1883
-0.3570
-0.1125
-0.1632
-0.0211
-0.1687
-0.2124
-0.1713
-0.0872
-0.2194
-0.1888
-0.2954
-0.4570
-0.0226
-0.0527
0.0406
-0.0609
-0.0456
-0.1176
-0.0145
0.0318
-0.2046
-0.0953
-0.0496
-0.1051
-0.0793
-0.1933
-0.1467
-0.3215
-0.3257
-0.2287
-0.0356
-0.1869
-0.1932
-0.0771
0.2768
-0.0656
-0.0895
-0.2548
-0.2365
0.0021
-0.0987
-0.3178
0.1613
0.0006
-0.2347
-0.4150
-0.1310
-0.3142
-0.2582
-0.5400
0.0772
-0.2546
-0.4454
-0.0262
-0.0937
-0.2201
-0.2044
-0.0155
-0.0893
-0.2167
0.1112
-0.0619
-0.1217
-0.1593
-0.1317
-0.1717
-0.3729
-0.3354
-0.3414
0.0358
-0.2067
-0.1087
0.0141
-0.0338
-0.2129
-0.1122
-0.1627
-0.2000
0.0908
-0.0041
-0.1313
-0.2942
0.0160
-0.1065
-0.1289
-0.1699
-0.1721
-0.1809
-0.2295
-0.3611
-0.1746
-0.3540
-0.1554
-0.2709
-0.2607
0.0084
-0.0311
-0.0022
-0.0831
0.0380
-0.4893
-0.2749
0.1245
-0.1272
[torch.FloatTensor of size 128]
), (u'layer3.0.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
-1.5906e-02 -1.6618e-02 -1.5938e-02
-5.2744e-03 1.5103e-02 9.8805e-03
-1.4850e-02 3.6254e-04 -1.1378e-02
( 0 , 1 ,.,.) =
-9.4971e-03 -1.8568e-02 -6.0605e-03
9.7622e-03 -1.2294e-02 -5.2978e-03
7.0518e-03 -1.6063e-02 -7.1445e-03
( 0 , 2 ,.,.) =
-2.2693e-02 -3.7669e-02 -3.3695e-02
-3.1569e-02 -5.8022e-02 -3.9105e-02
-3.4616e-02 -3.8806e-02 -1.5695e-02
...
( 0 ,125,.,.) =
4.8713e-03 7.9539e-03 1.4374e-02
-1.5242e-03 2.4200e-02 5.6440e-03
-4.4355e-03 6.2454e-03 6.8561e-03
( 0 ,126,.,.) =
1.6028e-02 -1.2036e-02 -1.3101e-03
9.5804e-03 5.7272e-03 1.6091e-03
-9.9173e-03 -1.3593e-02 -6.3679e-03
( 0 ,127,.,.) =
5.3450e-02 4.6441e-02 2.4824e-02
3.4065e-02 -2.8656e-03 -4.1207e-03
-1.4000e-02 -4.6092e-03 -1.4152e-02
⋮
( 1 , 0 ,.,.) =
-1.1567e-03 -1.8638e-02 -3.4453e-02
4.9889e-03 -1.1695e-02 -3.3321e-02
5.9653e-03 -1.6154e-02 -1.7452e-02
( 1 , 1 ,.,.) =
1.0729e-02 1.3964e-02 -1.9171e-02
2.8854e-03 1.2573e-02 7.2767e-03
-1.6815e-02 -1.8740e-02 -1.3784e-03
( 1 , 2 ,.,.) =
-2.1852e-02 6.2900e-03 1.5931e-02
-3.5272e-03 5.6997e-03 3.1077e-02
2.3169e-03 3.2389e-03 1.7490e-02
...
( 1 ,125,.,.) =
-1.6246e-02 -7.7688e-03 7.7471e-03
-1.4870e-03 -1.2226e-02 -9.3389e-03
8.6164e-04 -2.2071e-03 7.3769e-03
( 1 ,126,.,.) =
2.9310e-03 -2.3592e-02 5.8461e-03
1.4344e-02 -1.6924e-02 -6.1749e-03
-7.7191e-03 -3.2305e-02 -3.3688e-02
( 1 ,127,.,.) =
8.6900e-03 1.3976e-02 8.0760e-03
-3.3662e-03 1.0516e-02 1.4952e-02
1.8944e-02 3.0948e-02 2.5647e-02
⋮
( 2 , 0 ,.,.) =
-3.5797e-02 -2.2565e-02 -1.4440e-02
-7.5372e-03 -2.2142e-02 1.1150e-02
-3.6385e-03 -1.4821e-02 -1.6427e-02
( 2 , 1 ,.,.) =
-1.4620e-02 -3.0657e-02 -2.0434e-02
-2.8462e-02 -4.5328e-02 -5.7915e-02
2.8774e-02 -1.5172e-02 -2.4541e-02
( 2 , 2 ,.,.) =
1.7403e-02 1.9920e-02 -4.6249e-03
1.7813e-02 2.3648e-02 1.3638e-02
2.9347e-02 4.3449e-02 1.8594e-02
...
( 2 ,125,.,.) =
7.9258e-03 -1.2183e-02 -1.5811e-02
-1.0720e-02 -3.1404e-02 -7.5279e-03
-7.0299e-03 -1.7342e-02 -3.0783e-02
( 2 ,126,.,.) =
-1.0258e-02 -1.1796e-02 -1.7141e-02
-2.6423e-02 -1.5036e-03 2.7959e-02
-8.9306e-03 5.3510e-03 9.6632e-03
( 2 ,127,.,.) =
1.4481e-02 -3.1531e-02 -1.9707e-02
-1.4944e-02 -1.7709e-02 7.6966e-03
1.2465e-02 7.1035e-03 -6.1596e-03
...
⋮
(253, 0 ,.,.) =
5.3120e-03 2.5512e-02 7.1053e-03
1.9666e-02 2.6990e-02 4.2043e-02
4.1191e-02 2.2283e-02 3.5003e-02
(253, 1 ,.,.) =
2.5968e-03 4.0685e-03 1.0626e-02
4.6474e-03 2.0337e-02 8.0847e-03
1.4475e-02 -3.0070e-03 -1.9656e-02
(253, 2 ,.,.) =
-4.0235e-03 2.5510e-02 2.2875e-03
-1.5182e-02 2.6031e-02 8.2526e-03
-2.1065e-03 2.6928e-02 3.2296e-03
...
(253,125,.,.) =
5.5063e-03 -4.8631e-03 1.8346e-02
8.5499e-03 2.3002e-03 7.7201e-03
8.2280e-03 9.5818e-03 2.1510e-02
(253,126,.,.) =
-1.7702e-02 9.9203e-03 -1.2934e-02
-1.2670e-02 9.5506e-03 -1.2438e-02
8.9810e-03 4.9343e-02 3.6238e-02
(253,127,.,.) =
1.2333e-02 1.8408e-02 -1.7794e-02
5.7676e-03 -5.7844e-03 -1.1706e-02
3.4462e-03 -1.0299e-02 -4.2529e-02
⋮
(254, 0 ,.,.) =
3.1634e-02 7.6514e-02 4.4300e-02
9.3963e-02 1.4798e-01 1.5104e-01
6.6483e-02 1.3856e-01 1.1323e-01
(254, 1 ,.,.) =
-2.8205e-02 -4.0731e-03 -1.9967e-02
-1.9283e-02 -1.2330e-03 1.0728e-02
-1.6487e-02 -2.7540e-03 7.7751e-04
(254, 2 ,.,.) =
-1.2156e-02 -3.2183e-02 -1.5299e-02
-9.1752e-04 -1.2350e-02 -3.8531e-03
-1.9342e-02 -1.0735e-02 -2.1051e-02
...
(254,125,.,.) =
-3.0457e-03 6.5687e-03 -3.2163e-04
1.4628e-02 -1.6662e-02 1.4216e-02
2.2738e-02 1.2016e-02 7.1802e-03
(254,126,.,.) =
3.9151e-03 -1.9739e-02 1.1058e-02
-2.5105e-02 -3.8439e-02 -4.4722e-02
-3.5862e-02 -9.8120e-02 -6.8447e-02
(254,127,.,.) =
-8.4853e-03 2.2905e-03 3.0757e-03
3.8484e-03 1.8156e-02 6.9025e-03
8.9456e-03 8.0009e-03 1.2579e-02
⋮
(255, 0 ,.,.) =
-1.3006e-02 -9.0262e-03 1.0574e-03
-2.5979e-02 -1.9484e-02 -9.3637e-03
4.8438e-03 2.3742e-03 1.0574e-02
(255, 1 ,.,.) =
-2.4782e-03 -1.4049e-02 -2.8621e-02
-2.3822e-03 1.1463e-03 -2.3321e-02
1.2275e-02 8.3306e-04 1.4305e-03
(255, 2 ,.,.) =
-4.8958e-02 -4.3860e-02 -5.7901e-02
-3.5920e-02 -3.6503e-02 -3.8574e-02
-4.1023e-02 -3.3337e-02 -1.3673e-02
...
(255,125,.,.) =
-1.1772e-02 -8.1042e-03 -1.5803e-02
-2.7190e-02 -2.8550e-02 7.5042e-03
-2.4363e-02 1.3943e-02 6.0615e-03
(255,126,.,.) =
-2.7317e-02 1.9704e-02 2.2183e-02
-3.7557e-02 2.0815e-02 1.8682e-02
-4.4557e-02 -4.3529e-03 -1.6779e-02
(255,127,.,.) =
1.9939e-02 2.6802e-02 1.1996e-02
2.0260e-02 2.1540e-02 2.5003e-03
1.8079e-04 -7.6315e-03 -1.9582e-02
[torch.FloatTensor of size 256x128x3x3]
), (u'layer3.0.bn1.running_mean',
-0.1253
-0.2262
-0.4860
-0.1458
-0.6311
0.0073
-0.0597
0.0038
-0.1363
-0.2213
-0.3844
-0.5783
-1.2715
-0.4546
-1.4092
-0.4864
0.2884
-0.7827
0.3060
-0.3542
-0.5711
-0.7998
0.0888
-0.1439
-0.6867
-0.8588
-0.5447
0.2983
-0.1919
0.1344
-0.6387
-0.4716
0.6139
-0.0065
0.0092
-0.7543
-0.3666
-0.1479
-0.7263
-0.3064
-0.3003
-0.4880
-0.3688
-0.3295
-0.1466
-0.6681
-0.1217
-0.5661
-0.7542
-0.4977
-0.1982
-0.7480
0.2935
-0.5039
-0.4152
-0.1846
-0.0653
-0.3617
0.0979
-0.0989
-0.8747
-0.6866
-0.2850
-0.1807
-0.7564
0.4896
-0.4719
-0.3251
0.2361
-0.2823
-0.5454
-0.5703
-0.3914
-0.7459
-0.3127
0.4983
-0.4290
0.0501
-0.1465
-0.6060
0.3132
-0.3743
-0.5826
-0.3843
-0.1076
-0.5657
-0.3102
0.3179
-0.7787
-0.0326
-0.4723
-0.5669
-0.0142
-0.5974
-0.3175
-0.9361
-0.1838
0.1329
-1.0321
-0.0591
-0.4599
-0.5094
0.2070
-0.0520
0.1508
-0.8619
-0.0878
-0.8132
-0.3859
-0.2299
-0.6100
-0.2246
-0.3464
-0.9515
0.0855
0.3101
-0.4721
-0.4155
0.0080
-0.1732
-0.6501
-0.6203
-0.1372
-0.1522
-0.2870
-0.4941
0.0966
0.5073
-0.2510
-0.3032
-0.3150
-0.5733
-0.0545
-0.3441
-0.7644
-0.2321
-0.7738
-0.1745
0.2423
-0.3351
-0.1296
-0.5125
-0.1101
-0.8768
-0.2860
-0.3560
-0.1244
-0.2997
-0.1577
-0.3160
-0.1748
0.5893
0.1252
-0.2802
-0.0514
-0.6605
-0.1989
-0.1062
-0.0844
-0.6724
-0.0008
-0.2606
-0.3828
-0.1674
-1.4552
-0.4452
-0.2158
-0.5878
-0.4179
-0.6215
0.1737
-0.5887
-0.5720
0.0747
-0.6005
-0.3461
-0.3260
-0.3577
-0.0933
-0.3588
-0.3935
-0.9551
-0.9143
-0.2762
-0.3652
-0.1704
-0.2676
-0.2292
-0.3800
-0.4927
-0.2178
-0.3614
-0.1274
-0.5203
-0.5437
0.0210
-0.6357
-0.5927
-0.1611
-0.1015
-0.4067
-0.4212
-0.2671
-0.3272
-0.4998
0.0105
-0.3977
-0.4612
-0.0671
0.1528
-0.1927
-0.4018
-0.5817
-0.3383
-0.5079
-0.6062
-0.2094
0.0344
0.0049
-0.0074
-0.8431
-0.8824
-0.3549
-0.2095
-0.4937
-0.2907
-0.4414
-0.4896
0.0836
-0.9780
-0.4721
-0.1474
-0.3185
-0.2436
-0.1797
-0.0429
-0.2972
-0.4299
-0.3125
-0.3699
-0.4899
-0.0979
-0.7804
-0.3924
0.0850
-0.5030
-0.6755
-0.2506
-0.4354
-0.2441
0.0193
-0.3442
-0.6758
-0.4484
-0.1628
-0.6801
[torch.FloatTensor of size 256]
), (u'layer3.0.bn1.running_var',
0.2509
0.2989
0.2810
0.2616
0.3038
0.3614
0.1749
0.2615
0.2315
0.2593
0.3199
0.2039
0.3937
0.2819
0.6928
0.1669
0.1971
0.2347
0.1798
0.2584
0.2045
0.2247
0.2575
0.1896
0.2243
0.3290
0.2262
0.1629
0.1750
0.2162
0.2686
0.1990
0.3028
0.2474
0.6300
0.2747
0.2340
0.2184
0.3476
0.1966
0.1739
0.2011
0.1882
0.1917
0.2349
0.1796
0.2018
0.1950
0.2186
0.2595
0.1522
0.2088
0.1380
0.5258
0.1659
0.3283
0.1931
0.2347
0.1449
0.2613
0.2720
0.1855
0.2469
0.2337
0.2525
0.1487
0.1740
0.2101
0.3507
0.1668
0.2851
0.1874
0.1725
0.2619
0.1903
0.2774
0.1875
0.2584
0.1635
0.2693
0.1709
0.7093
0.2264
0.2439
0.2717
0.2020
0.2420
0.1979
0.3249
0.2325
0.2174
0.2400
0.2201
0.1914
0.2311
0.4723
0.2749
0.2033
0.4373
0.2124
0.1956
0.1570
0.2497
0.2723
0.1928
0.2726
0.1942
0.2862
0.2731
0.2348
0.3259
0.3079
0.2799
0.1865
0.2416
0.2262
0.3502
0.2169
0.2371
0.1750
0.2822
0.1983
0.3979
0.2380
0.1798
0.2661
0.1640
0.4260
0.2032
0.1764
0.1802
0.2821
0.4783
0.1895
0.3361
0.2009
0.1541
0.2021
0.2365
0.3530
0.1833
0.6131
0.1840
0.2772
0.2735
0.1799
0.4005
0.2144
0.2677
0.2665
0.4213
0.2373
0.2408
0.2575
0.3893
0.1723
0.3173
0.2014
0.5098
0.2254
0.2103
0.3155
0.3065
0.1814
0.2512
0.1665
0.2078
0.2352
0.2161
0.1674
0.4302
0.3045
0.3518
0.1620
0.2234
0.2028
0.1523
0.3315
0.2086
0.3005
0.2760
0.1988
0.1683
0.2111
0.3077
0.2803
0.3045
0.1773
0.1797
0.1470
0.2122
0.2147
0.1688
0.1913
0.2067
0.2444
0.2609
0.2750
0.2597
0.2373
0.2216
0.3981
0.7746
0.2015
0.1734
0.3637
0.1748
0.2495
0.2457
0.1559
0.2741
0.3765
0.2767
0.2841
0.2553
0.1582
0.3328
0.1996
0.2284
0.2720
0.2520
0.2724
0.1931
0.2924
0.2629
0.3760
0.2206
0.2616
0.1907
0.2821
0.2752
0.2303
0.2730
0.2340
0.2235
0.1466
0.2869
0.2763
0.2823
0.1843
0.1804
0.2244
0.1840
0.1446
0.2126
0.1792
0.2546
0.1661
0.1881
0.1667
0.2371
0.2523
0.2260
0.2728
0.2028
0.4802
[torch.FloatTensor of size 256]
), (u'layer3.0.bn1.weight', Parameter containing:
0.2856
0.2425
0.3032
0.3168
0.3011
0.3475
0.3076
0.3105
0.3646
0.3255
0.2195
0.3167
0.2674
0.3104
0.3026
0.3443
0.2915
0.3379
0.2887
0.2996
0.3588
0.3164
0.2882
0.2917
0.3492
0.3749
0.3587
0.3166
0.2756
0.2978
0.3364
0.2893
0.3106
0.2506
0.3460
0.3621
0.2570
0.3695
0.2935
0.3286
0.3243
0.3188
0.3093
0.3314
0.3550
0.2978
0.2737
0.3023
0.3179
0.2831
0.3065
0.3390
0.3053
0.3099
0.3017
0.3472
0.3034
0.2935
0.3352
0.3676
0.3163
0.3404
0.3078
0.2819
0.3794
0.3083
0.2778
0.3363
0.2284
0.3259
0.2790
0.3072
0.2975
0.3847
0.3372
0.2253
0.2827
0.3737
0.2796
0.3485
0.3879
0.3288
0.3340
0.3335
0.2756
0.3500
0.2897
0.2798
0.2907
0.3220
0.3824
0.3522
0.3278
0.3689
0.3147
0.3600
0.3123
0.2519
0.2355
0.3211
0.3203
0.3345
0.2768
0.3341
0.3153
0.3175
0.2224
0.2956
0.3206
0.2658
0.3662
0.2715
0.3655
0.3427
0.2820
0.2754
0.4669
0.3090
0.3468
0.3144
0.3220
0.2765
0.3301
0.3219
0.3152
0.2813
0.2497
0.3514
0.3264
0.3014
0.2734
0.3522
0.3831
0.3028
0.2940
0.2825
0.3099
0.2373
0.2705
0.4189
0.2985
0.3841
0.2754
0.3091
0.3169
0.2824
0.2749
0.3493
0.4018
0.3108
0.2176
0.2821
0.3199
0.3358
0.2468
0.3332
0.2876
0.2964
0.2385
0.3451
0.3081
0.2760
0.2533
0.2576
0.3092
0.2950
0.3089
0.3113
0.3475
0.3172
0.2474
0.3371
0.3450
0.3189
0.3150
0.3008
0.2694
0.3730
0.3235
0.2988
0.2812
0.3245
0.3630
0.2843
0.3533
0.3451
0.3244
0.3524
0.3118
0.3429
0.3215
0.2748
0.3287
0.3656
0.2901
0.2523
0.3284
0.2523
0.3426
0.2851
0.2918
0.2497
0.5159
0.3026
0.2743
0.2379
0.3524
0.3394
0.2264
0.2652
0.3759
0.3777
0.2459
0.3046
0.3067
0.3775
0.2976
0.3552
0.2696
0.2649
0.2872
0.2985
0.2867
0.3676
0.3494
0.3823
0.3246
0.3567
0.2662
0.3357
0.2935
0.2987
0.2664
0.3019
0.3175
0.2436
0.3274
0.2764
0.2466
0.2876
0.3060
0.3157
0.3329
0.2984
0.2961
0.3309
0.3729
0.3238
0.3491
0.3342
0.3037
0.3578
0.2849
0.2827
0.2809
0.3249
[torch.FloatTensor of size 256]
), (u'layer3.0.bn1.bias', Parameter containing:
-0.0915
0.0189
-0.1235
-0.0613
-0.1003
-0.1306
-0.1473
-0.1079
-0.2438
-0.1113
0.1361
-0.1477
0.0387
-0.0907
0.0352
-0.1851
-0.1319
-0.1746
-0.0815
-0.1004
-0.3394
-0.1712
-0.0807
-0.1228
-0.2263
-0.1503
-0.2314
-0.2327
-0.0854
-0.0802
-0.0716
-0.0839
-0.0592
0.0358
-0.0322
-0.2197
0.0027
-0.1471
-0.0264
-0.1886
-0.2417
-0.1494
-0.1904
-0.1089
-0.2657
-0.1362
-0.0487
-0.1340
-0.0930
-0.0064
-0.1721
-0.1476
-0.1714
0.0336
-0.1011
-0.1761
-0.1184
-0.0482
-0.3260
-0.1555
-0.0169
-0.2373
-0.1015
-0.1051
-0.2738
-0.1917
-0.0503
-0.1098
0.1484
-0.2282
-0.0700
-0.1427
-0.1417
-0.3096
-0.2043
0.0269
-0.0779
-0.0842
-0.0464
-0.1429
-0.3917
0.0257
-0.1779
-0.0993
-0.0507
-0.2222
-0.0951
-0.0861
-0.0743
-0.1666
-0.2054
-0.1782
-0.1150
-0.2525
-0.0694
-0.0536
-0.0499
-0.0311
0.1212
-0.0988
-0.1570
-0.3093
-0.0797
-0.0994
-0.1774
-0.0505
0.0766
-0.0480
-0.1278
-0.0651
-0.1737
0.0303
-0.1334
-0.2435
-0.0746
-0.0365
-0.1843
-0.0887
-0.1924
-0.1110
-0.1458
-0.0895
-0.0956
-0.2042
-0.1338
-0.0637
-0.0699
-0.1656
-0.1521
-0.1317
-0.0826
-0.2470
-0.1174
-0.1475
-0.0840
-0.0681
-0.1789
0.0288
-0.0362
-0.3005
-0.1441
-0.0812
-0.0492
-0.0657
-0.1249
-0.1104
0.0187
-0.1351
-0.1944
-0.0909
0.2067
-0.1081
-0.2499
-0.0999
0.0507
-0.1899
-0.0369
-0.1432
0.1279
-0.1782
-0.1172
-0.0099
0.0785
-0.0681
-0.0365
-0.1596
-0.1606
-0.0922
-0.1773
-0.1788
0.0306
-0.1101
-0.1355
-0.2244
-0.0860
-0.1232
-0.0927
-0.1666
-0.1393
-0.0898
-0.0614
-0.1740
-0.2503
-0.0593
-0.1272
-0.1422
-0.0743
-0.2208
-0.2207
-0.2742
-0.1302
-0.0916
-0.1696
-0.2481
-0.1524
0.0410
-0.1077
0.0408
-0.1915
-0.0697
-0.1049
-0.0110
-0.3257
-0.1336
-0.1021
0.0128
-0.2717
-0.1245
0.0288
-0.1025
-0.2405
-0.1476
0.1008
-0.0220
-0.0983
-0.4417
-0.0774
-0.3207
-0.0272
-0.0726
-0.0608
-0.0430
-0.0872
-0.1280
-0.1608
-0.1529
-0.1745
-0.1702
-0.0486
-0.1459
-0.0552
-0.0808
-0.0264
-0.0952
-0.1126
-0.0452
-0.0837
-0.0331
0.0127
-0.0865
-0.1446
-0.0732
-0.2160
-0.0952
-0.1297
-0.2008
-0.2135
-0.2204
-0.2381
-0.1787
-0.1386
-0.1901
-0.0981
-0.0850
-0.0761
-0.0586
[torch.FloatTensor of size 256]
), (u'layer3.0.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
-9.2775e-03 -3.3897e-02 -1.1927e-02
-2.4595e-02 -7.9761e-02 -4.8709e-02
-4.3490e-02 -8.0118e-02 -6.5252e-02
( 0 , 1 ,.,.) =
-2.8918e-02 2.3763e-04 -2.8561e-02
9.8557e-03 1.0253e-02 -1.7677e-02
-1.0684e-02 2.8071e-03 -1.2483e-02
( 0 , 2 ,.,.) =
-1.4730e-02 2.2622e-02 4.4314e-03
1.5512e-02 1.0901e-02 -4.0294e-03
-2.0756e-02 -1.8048e-02 -1.7258e-02
...
( 0 ,253,.,.) =
3.1821e-04 -4.0924e-03 -7.9885e-04
-2.1664e-02 -2.2339e-02 -2.9870e-02
1.0453e-02 3.4707e-03 -1.1426e-02
( 0 ,254,.,.) =
9.6516e-03 1.8361e-02 3.7035e-02
3.7147e-03 1.0427e-02 1.5162e-02
8.4325e-03 1.8343e-02 3.0159e-02
( 0 ,255,.,.) =
1.3859e-03 8.4181e-03 9.7185e-03
2.6455e-02 4.1474e-02 5.5292e-02
1.6905e-02 6.1027e-02 5.6296e-02
⋮
( 1 , 0 ,.,.) =
1.1743e-02 1.6508e-02 5.1232e-03
2.9441e-02 2.0441e-02 2.1624e-02
7.8852e-03 1.3290e-02 1.1664e-02
( 1 , 1 ,.,.) =
-1.5315e-02 -2.1319e-02 -8.9703e-03
-2.9171e-02 -5.1600e-02 -4.3605e-02
-4.5486e-03 -3.7239e-02 -4.2013e-02
( 1 , 2 ,.,.) =
3.0217e-04 3.9781e-02 -1.4889e-04
1.2860e-02 3.3156e-02 1.6254e-02
-9.5886e-03 -5.6529e-03 -1.6966e-02
...
( 1 ,253,.,.) =
4.4662e-02 8.1982e-03 1.6867e-02
-6.6190e-03 -3.7080e-02 -5.9346e-03
-2.3913e-02 -6.0699e-02 -2.8947e-02
( 1 ,254,.,.) =
-5.7020e-03 -4.2262e-02 -2.1947e-02
-2.2780e-02 -3.1428e-02 -5.8322e-02
-1.9598e-02 -5.2995e-02 -4.8502e-02
( 1 ,255,.,.) =
6.4948e-03 3.2666e-03 9.3442e-03
1.0466e-03 -4.9306e-03 -1.1003e-02
-1.5981e-02 -1.0119e-02 -1.4555e-02
⋮
( 2 , 0 ,.,.) =
-6.1149e-03 -6.6849e-03 -6.9256e-03
-5.1692e-03 -8.9064e-03 -1.4313e-02
-1.1450e-02 -1.7125e-02 -2.3729e-02
( 2 , 1 ,.,.) =
3.9899e-02 1.6684e-02 2.0991e-02
1.6498e-02 -2.6236e-02 -1.1630e-02
5.9030e-03 -2.0597e-02 -1.5280e-02
( 2 , 2 ,.,.) =
6.0228e-03 2.4200e-02 2.0716e-02
4.9551e-03 -6.1590e-03 1.4790e-02
9.8595e-03 -2.7931e-02 -5.4261e-03
...
( 2 ,253,.,.) =
-9.7426e-03 -1.6989e-03 -1.0106e-02
-6.1351e-04 4.3355e-02 3.8143e-02
3.7943e-03 4.4980e-02 3.9165e-02
( 2 ,254,.,.) =
1.3395e-02 6.9187e-03 1.9631e-02
6.7533e-03 2.5027e-02 1.5162e-02
1.7857e-03 -4.3971e-03 3.7016e-03
( 2 ,255,.,.) =
-1.7440e-02 -1.6260e-02 -2.4000e-02
-1.9716e-02 -1.7364e-02 -1.7828e-02
-3.0010e-02 -1.3697e-02 -2.1068e-02
...
⋮
(253, 0 ,.,.) =
6.5624e-03 6.0837e-03 2.8446e-02
-1.2967e-02 -5.0910e-02 -2.0435e-02
-1.5419e-02 -1.4899e-02 -1.8056e-02
(253, 1 ,.,.) =
1.2341e-02 3.2479e-02 2.2650e-02
-4.2432e-03 -1.8113e-02 2.2224e-03
2.9012e-03 -1.7405e-02 3.1869e-03
(253, 2 ,.,.) =
-1.0992e-02 1.1080e-02 -1.4198e-02
8.2258e-03 3.0135e-02 4.1601e-02
6.0791e-04 1.6776e-04 2.1328e-02
...
(253,253,.,.) =
-7.5068e-04 2.6565e-02 1.1820e-02
-1.5916e-02 -7.4243e-03 -5.1214e-03
4.1732e-03 -6.8548e-03 -7.3191e-03
(253,254,.,.) =
-6.9767e-03 9.7686e-04 1.8935e-03
6.0631e-03 5.0983e-02 -3.4937e-03
-8.1496e-03 -3.0339e-02 -1.7409e-02
(253,255,.,.) =
-1.0048e-02 3.2093e-04 -1.1435e-03
-1.5435e-03 -2.9689e-02 -1.9539e-02
-9.6000e-04 4.8948e-03 1.5117e-02
⋮
(254, 0 ,.,.) =
1.6080e-02 1.2594e-02 5.4767e-03
-1.3241e-02 -1.9564e-02 -2.0807e-02
-7.7261e-03 -2.3040e-02 -2.0197e-02
(254, 1 ,.,.) =
-1.8947e-03 5.3025e-02 1.3421e-02
2.7344e-03 2.4908e-02 1.6726e-02
-1.9196e-02 -1.8768e-02 -1.9954e-02
(254, 2 ,.,.) =
8.0703e-03 2.9987e-02 5.7642e-04
3.5938e-03 2.5408e-02 -1.0444e-02
-9.6803e-04 -1.9317e-02 -1.2085e-02
...
(254,253,.,.) =
1.6295e-02 2.5060e-02 2.8950e-02
-7.3188e-03 -1.4100e-03 1.2378e-02
-2.1144e-02 -3.4673e-02 -1.9507e-02
(254,254,.,.) =
1.6469e-02 5.1930e-02 4.9364e-02
5.8284e-03 1.9868e-02 3.6292e-02
-4.9320e-03 -1.6470e-02 -1.2967e-02
(254,255,.,.) =
-1.0214e-02 -3.0802e-02 -3.4004e-02
5.5274e-03 -1.0925e-02 4.6995e-04
3.8212e-02 2.0936e-02 3.2566e-02
⋮
(255, 0 ,.,.) =
1.8364e-02 -3.0699e-03 1.0348e-02
-7.2351e-03 -1.2742e-03 -6.9527e-03
2.1686e-02 1.1490e-03 -3.2707e-03
(255, 1 ,.,.) =
-1.6594e-02 1.5176e-04 -9.1776e-03
1.5036e-02 5.8408e-02 2.1840e-02
-1.3606e-02 1.8126e-02 1.6354e-02
(255, 2 ,.,.) =
2.1872e-02 3.1581e-02 1.8289e-02
-2.1028e-03 -1.5633e-02 2.0265e-02
5.2924e-03 4.8438e-04 1.5701e-02
...
(255,253,.,.) =
4.4712e-03 -2.4757e-03 1.7267e-03
-5.2339e-03 -8.8001e-03 1.3738e-02
-1.0695e-02 1.0347e-03 1.6962e-02
(255,254,.,.) =
-5.9934e-03 -3.6803e-02 3.0996e-03
1.0224e-02 2.9117e-02 -7.3036e-04
9.9051e-03 5.9974e-02 2.7242e-02
(255,255,.,.) =
-9.1759e-03 -1.8297e-02 6.2411e-03
-3.1871e-02 -2.9350e-02 -1.4883e-02
-1.4808e-02 -1.2348e-02 -2.3609e-02
[torch.FloatTensor of size 256x256x3x3]
), (u'layer3.0.bn2.running_mean',
-0.1898
-0.4822
-0.0088
0.0064
-0.3401
-0.1041
-0.1626
-0.2259
-0.1119
-0.3254
-0.0254
-0.2351
-0.0790
-0.3306
-0.0956
-0.0415
-0.3207
-0.0037
-0.1830
-0.1295
-0.2069
-0.2632
-0.1351
-0.1295
-0.2527
-0.0104
-0.0875
-0.3375
-0.5001
-0.1199
-0.1989
-0.2964
-0.1924
-0.2904
-0.0091
-0.0104
0.0738
-0.1760
0.0442
-0.2232
0.0376
-0.1235
-0.0065
-0.2524
-0.0120
0.0555
0.1533
-0.1421
-0.1160
-0.0893
-0.1547
-0.1615
0.0208
-0.3496
-0.1477
-0.6155
-0.1364
-0.0405
-0.3246
-0.1697
0.1694
-0.0662
-0.2076
-0.3969
-0.2936
0.1080
-0.2798
-0.0859
-0.0713
-0.3520
-0.0642
-0.1993
0.0202
-0.3808
-0.0833
-0.1321
-0.3009
-0.1800
0.0824
-0.0532
0.1538
0.1777
0.1837
-0.1972
-0.0083
-0.2135
-0.3881
-0.1686
-0.1149
0.2055
-0.2054
-0.1345
-0.1579
-0.1801
-0.2133
-0.2940
-0.2087
-0.0419
-0.2158
-0.0453
-0.2935
-0.1574
0.0310
0.0154
-0.1013
0.0401
-0.4071
-0.2852
-0.2954
-0.2261
-0.1083
0.1359
-0.6190
-0.1957
-0.2018
-0.0181
-0.3157
-0.0974
-0.2188
0.0105
-0.0686
-0.2937
-0.3168
-0.1745
0.0286
-0.1721
-0.2043
-0.2114
-0.2032
-0.2170
0.0459
0.2110
-0.1009
-0.0560
0.1501
-0.1713
0.0171
-0.2029
-0.2175
0.0836
-0.0215
-0.3423
-0.1450
0.1632
-0.1679
-0.1672
-0.1634
-0.3611
-0.0664
-0.3015
-0.1192
0.0192
-0.1420
-0.2852
-0.3039
-0.0897
-0.0659
-0.0240
-0.2212
0.0306
-0.0083
-0.3773
-0.2584
0.0030
-0.0981
-0.2602
-0.1212
-0.2094
-0.1398
-0.1795
-0.1467
0.0102
-0.1396
-0.2732
-0.1427
-0.1136
-0.1668
-0.3346
-0.3108
-0.0469
-0.0733
-0.3828
-0.1082
-0.0854
-0.1564
-0.1707
-0.1396
0.0373
0.2787
-0.2415
-0.1196
-0.1453
-0.2642
-0.1012
0.0470
-0.1133
0.1593
-0.0566
-0.1868
-0.2362
0.0922
0.1657
0.1560
-0.1998
-0.1939
0.1154
0.0537
-0.2192
-0.0997
-0.2332
-0.1498
0.0317
0.0793
-0.2177
-0.2654
-0.2278
-0.0419
0.0142
-0.2111
-0.0224
0.0953
-0.1628
0.0981
-0.1220
-0.0360
-0.3884
0.1147
0.0069
-0.2821
-0.6060
-0.2243
0.0177
-0.0736
-0.1372
-0.0436
0.1616
-0.1906
-0.2774
0.1136
0.1891
0.0610
0.0161
-0.1046
-0.0830
0.0079
-0.0963
-0.1956
-0.1445
-0.1591
0.0612
-0.2552
0.0082
0.1980
-0.2280
-0.1163
-0.1644
[torch.FloatTensor of size 256]
), (u'layer3.0.bn2.running_var',
0.1079
0.1131
0.0924
0.0718
0.0979
0.1616
0.0678
0.0953
0.1568
0.1489
0.1486
0.1188
0.1037
0.3279
0.0749
0.0980
0.0845
0.0636
0.0813
0.1006
0.0732
0.0642
0.1933
0.0824
0.0592
0.1489
0.0738
0.0896
0.2559
0.1517
0.0685
0.1045
0.1164
0.1185
0.0899
0.1024
0.0694
0.1614
0.0858
0.0782
0.0630
0.1314
0.1087
0.1018
0.0709
0.0973
0.0638
0.0795
0.0911
0.0772
0.0806
0.0986
0.1304
0.1646
0.1006
0.1233
0.0703
0.1033
0.1296
0.2374
0.0908
0.1296
0.0678
0.1330
0.2067
0.0860
0.1207
0.1573
0.1656
0.0776
0.1666
0.1887
0.1168
0.1144
0.0464
0.1130
0.0565
0.0652
0.1031
0.1098
0.0761
0.1014
0.0627
0.0624
0.0985
0.1319
0.0639
0.0881
0.0646
0.1989
0.2018
0.0689
0.0622
0.1239
0.1009
0.1935
0.0756
0.1753
0.1422
0.1525
0.0735
0.1037
0.0774
0.0733
0.1340
0.0680
0.1867
0.0649
0.0885
0.1279
0.1967
0.1160
0.1611
0.0786
0.0696
0.1040
0.1400
0.0959
0.0994
0.0631
0.0447
0.1600
0.0627
0.1007
0.2316
0.1074
0.0695
0.0526
0.1179
0.2217
0.0784
0.0968
0.0907
0.0670
0.0758
0.0821
0.1580
0.1111
0.0929
0.0876
0.0962
0.0588
0.0759
0.0939
0.0777
0.1199
0.0678
0.1432
0.0874
0.0669
0.0881
0.1013
0.1303
0.0878
0.1138
0.0809
0.0946
0.0940
0.1462
0.1615
0.0843
0.1210
0.0923
0.0445
0.0966
0.1024
0.0637
0.0707
0.0742
0.1396
0.0499
0.1179
0.0688
0.0487
0.0878
0.0864
0.0791
0.1722
0.0996
0.1252
0.0556
0.0761
0.0729
0.1205
0.0550
0.1080
0.1323
0.2279
0.0527
0.0671
0.0955
0.1127
0.1290
0.0872
0.0926
0.0790
0.0589
0.1320
0.0604
0.0704
0.0905
0.0549
0.1623
0.0629
0.0672
0.0640
0.0800
0.1275
0.1319
0.0743
0.1381
0.0659
0.1329
0.1962
0.0738
0.1416
0.1639
0.0966
0.0992
0.0786
0.0766
0.0907
0.0760
0.1000
0.1351
0.0603
0.0603
0.0767
0.0671
0.1331
0.1200
0.0779
0.0832
0.0619
0.1206
0.0986
0.0742
0.0960
0.0676
0.0902
0.1195
0.0415
0.0926
0.1191
0.1055
0.1332
0.0862
0.0865
0.0650
0.0798
0.0661
0.1010
0.1038
0.1005
0.0945
0.0611
[torch.FloatTensor of size 256]
), (u'layer3.0.bn2.weight', Parameter containing:
0.3212
0.2124
0.2661
0.3594
0.2785
0.2582
0.3108
0.3096
0.3348
0.2992
0.2545
0.2458
0.3133
0.4159
0.2997
0.3070
0.3135
0.4418
0.3743
0.2570
0.2943
0.3078
0.2738
0.3948
0.2928
0.3572
0.3435
0.5379
0.4243
0.3908
0.2745
0.2798
0.3217
0.1956
0.2751
0.3187
0.3507
0.2751
0.1919
0.3307
0.2850
0.3038
0.2179
0.2652
0.2944
0.2138
0.2184
0.2948
0.3262
0.3759
0.2557
0.3796
0.2950
0.3386
0.3243
0.3070
0.3331
0.2302
0.3036
0.3377
0.2922
0.2204
0.3267
0.3198
0.4023
0.2987
0.4860
0.2854
0.2716
0.4341
0.2834
0.2296
0.2507
0.3120
0.3673
0.3244
0.3380
0.3272
0.2868
0.2877
0.3210
0.2332
0.3379
0.2767
0.2942
0.2672
0.4401
0.2908
0.3771
0.2789
0.3056
0.3276
0.3871
0.2453
0.2559
0.2783
0.3168
0.3410
0.2318
0.3577
0.5036
0.3557
0.2475
0.1852
0.2273
0.3602
0.2919
0.3928
0.4423
0.2052
0.2524
0.2189
0.4113
0.3611
0.4284
0.2333
0.3504
0.7001
0.3754
0.2874
0.3702
0.3174
0.3640
0.2889
0.4155
0.2479
0.2898
0.3740
0.4926
0.2808
0.2388
0.3473
0.1868
0.2837
0.3090
0.3614
0.2797
0.6871
0.2854
0.2937
0.3128
0.4863
0.2193
0.2871
0.2554
0.4175
0.3044
0.3230
0.3343
0.4947
0.3924
0.2264
0.2657
0.4193
0.3483
0.3551
0.2877
0.2559
0.2459
0.2775
0.3842
0.2949
0.3510
0.1926
0.3101
0.3417
0.3931
0.3918
0.3239
0.2851
0.4583
0.2669
0.2663
0.4433
0.3221
0.3655
0.3336
0.4393
0.3970
0.3727
0.3523
0.3586
0.3286
0.4181
0.2955
0.3050
0.2988
0.4320
0.2309
0.3826
0.2270
0.2228
0.3206
0.3273
0.2627
0.3087
0.2920
0.2328
0.4144
0.4075
0.3264
0.3583
0.3014
0.3150
0.4438
0.4042
0.2028
0.3855
0.2570
0.2361
0.2343
0.3312
0.2303
0.3744
0.4727
0.3601
0.2754
0.1987
0.3027
0.3427
0.2994
0.2533
0.2639
0.3460
0.3847
0.4368
0.3786
0.3123
0.2591
0.3979
0.2577
0.3131
0.2934
0.3027
0.2942
0.2266
0.2806
0.2977
0.1858
0.2788
0.2504
0.3948
0.3496
0.2429
0.2155
0.2683
0.4100
0.3495
0.4243
0.2627
0.3329
0.2849
0.3924
0.3728
0.2655
0.3338
[torch.FloatTensor of size 256]
), (u'layer3.0.bn2.bias', Parameter containing:
-0.0264
0.0995
-0.0068
-0.0877
0.0078
0.0407
-0.0307
0.0060
0.0017
0.0478
0.0630
0.0358
-0.0504
0.0214
-0.0090
-0.0337
-0.0455
-0.1924
-0.0676
0.0775
-0.0340
-0.0799
0.1314
-0.1273
-0.0628
-0.0055
-0.0915
-0.1757
-0.0083
-0.0945
0.0025
-0.0319
-0.0158
0.1437
-0.0035
0.0108
-0.0511
0.0358
0.0878
-0.0452
-0.0458
0.0147
0.0687
0.0168
-0.0477
0.0568
0.0460
-0.0507
0.0059
-0.1034
0.0103
-0.1052
-0.0166
-0.0192
-0.0345
0.0201
-0.1362
0.0396
-0.0088
-0.0108
-0.0298
0.0721
-0.0669
-0.0094
-0.0310
-0.0267
-0.1418
0.1190
0.0669
-0.2137
0.0427
0.0478
0.0339
0.0001
-0.1482
-0.0237
-0.0743
-0.0684
-0.0201
0.0147
-0.0396
0.0194
-0.0696
-0.0558
0.0080
0.0236
-0.2578
0.0064
-0.1004
0.0280
0.0152
-0.0484
-0.1536
0.1049
0.0499
0.0657
-0.0541
0.0077
0.0941
-0.0200
-0.2356
-0.0623
0.0334
0.1102
0.0770
-0.0325
0.0481
-0.1499
-0.1650
0.1230
0.0712
0.0589
-0.0482
-0.0972
-0.1860
0.0853
-0.0516
-0.3080
-0.0604
-0.0771
-0.2728
0.0289
-0.1328
0.0173
-0.0392
0.0542
-0.0372
-0.1528
-0.1766
0.0839
0.0693
-0.0826
0.1118
-0.0508
-0.0448
-0.0375
0.0304
-0.3782
0.0149
0.0068
-0.0521
-0.2950
0.0899
0.0296
0.0199
-0.0835
-0.0964
-0.0238
0.0349
-0.2663
-0.1618
0.0736
0.0276
-0.1109
-0.0103
-0.0975
0.0140
0.0108
0.0784
0.0131
-0.0395
0.0248
-0.0774
-0.0284
0.0104
-0.0423
-0.1663
-0.0949
-0.0343
0.0455
-0.3000
-0.0069
0.0141
-0.2615
-0.0736
-0.1063
-0.0105
-0.0712
-0.1034
-0.0298
-0.1428
-0.0517
-0.0571
-0.0544
-0.0423
-0.0085
0.0159
-0.0654
-0.0613
-0.1450
0.0399
0.0816
-0.0078
-0.0341
0.0320
-0.0448
-0.0703
0.1021
-0.1799
-0.2117
-0.0598
-0.1160
0.0393
-0.0454
-0.1845
-0.1085
0.0558
-0.0636
0.0168
0.0002
0.0799
-0.0672
0.0798
-0.0040
-0.1902
0.0200
0.0732
0.1032
-0.0264
0.0240
-0.0442
0.0229
0.0234
-0.0235
0.0105
-0.2149
-0.1281
-0.0183
-0.0006
-0.0516
0.0566
-0.0543
0.0141
-0.0499
0.0673
0.0517
-0.0040
0.0351
0.0828
0.0100
0.0592
-0.2043
-0.0762
0.0414
0.0775
0.0760
-0.1592
-0.0836
-0.1663
0.0023
-0.0685
0.0381
-0.0987
-0.0203
0.0154
-0.1055
[torch.FloatTensor of size 256]
), (u'layer3.0.downsample.0.weight', Parameter containing:
( 0 , 0 ,.,.) =
8.0862e-03
( 0 , 1 ,.,.) =
-1.9208e-02
( 0 , 2 ,.,.) =
-1.7272e-02
...
( 0 ,125,.,.) =
-1.2758e-02
( 0 ,126,.,.) =
2.5496e-03
( 0 ,127,.,.) =
5.3547e-03
⋮
( 1 , 0 ,.,.) =
-1.4284e-02
( 1 , 1 ,.,.) =
-5.5428e-02
( 1 , 2 ,.,.) =
-3.4568e-02
...
( 1 ,125,.,.) =
2.7476e-02
( 1 ,126,.,.) =
3.5964e-02
( 1 ,127,.,.) =
2.3994e-02
⋮
( 2 , 0 ,.,.) =
7.6148e-03
( 2 , 1 ,.,.) =
2.0725e-02
( 2 , 2 ,.,.) =
-1.0066e-02
...
( 2 ,125,.,.) =
-2.7756e-02
( 2 ,126,.,.) =
6.3956e-03
( 2 ,127,.,.) =
-2.2016e-03
...
⋮
(253, 0 ,.,.) =
3.3605e-02
(253, 1 ,.,.) =
-4.2383e-02
(253, 2 ,.,.) =
2.2568e-02
...
(253,125,.,.) =
-3.3004e-02
(253,126,.,.) =
-9.1010e-04
(253,127,.,.) =
-1.7735e-02
⋮
(254, 0 ,.,.) =
-1.1416e-02
(254, 1 ,.,.) =
-1.8309e-02
(254, 2 ,.,.) =
7.6073e-03
...
(254,125,.,.) =
1.5128e-02
(254,126,.,.) =
3.3239e-02
(254,127,.,.) =
2.0724e-04
⋮
(255, 0 ,.,.) =
6.2636e-03
(255, 1 ,.,.) =
-2.0036e-02
(255, 2 ,.,.) =
1.0343e-03
...
(255,125,.,.) =
-1.9124e-02
(255,126,.,.) =
4.5483e-02
(255,127,.,.) =
7.8252e-03
[torch.FloatTensor of size 256x128x1x1]
), (u'layer3.0.downsample.1.running_mean',
-0.1077
-0.1229
-0.0681
-0.1930
-0.0571
-0.0224
-0.0338
-0.2437
-0.0447
0.0452
0.0008
0.0606
-0.0686
-0.0411
0.0435
-0.0873
-0.2157
-0.1593
-0.0157
-0.0698
-0.1796
-0.0204
0.0443
-0.1573
-0.0407
-0.1830
0.0180
-0.0895
-0.0434
-0.2033
-0.0171
0.1442
-0.0797
-0.1848
-0.0201
-0.0438
-0.1435
-0.0157
0.0630
-0.0223
-0.1470
-0.0833
-0.1568
0.0180
0.0083
0.1125
-0.0936
0.0647
-0.1352
-0.1372
0.1363
-0.1031
-0.1675
-0.2070
-0.0078
-0.0178
0.1123
-0.0876
-0.1877
0.0247
-0.2548
-0.1413
-0.0916
-0.1613
0.0087
-0.2045
-0.0420
-0.0763
-0.0522
-0.0029
-0.0424
0.1541
0.0664
-0.0733
-0.0935
-0.0226
-0.1797
0.0129
0.0465
-0.1008
-0.0652
0.0088
-0.0120
0.0576
-0.0571
-0.0667
-0.0228
-0.0880
-0.0192
0.0915
0.0212
-0.2866
-0.1851
0.0631
-0.0325
0.0106
-0.0163
-0.1375
-0.0208
0.0400
0.0382
-0.1582
-0.0242
-0.0104
-0.0253
-0.0071
-0.0822
-0.0029
0.0168
-0.1328
-0.0639
0.0832
-0.0666
-0.0080
-0.0459
0.0450
-0.1013
-0.0630
-0.0629
-0.1361
-0.0500
-0.0488
0.1090
-0.0700
-0.0801
-0.1030
-0.0278
-0.1242
-0.0585
-0.0263
-0.0371
-0.0678
-0.1683
0.0122
0.0382
-0.0072
-0.0424
-0.0864
-0.0058
0.0969
-0.2232
0.0251
-0.0298
-0.0126
0.0531
-0.2009
-0.2212
-0.0022
0.0396
-0.0029
-0.1377
0.0226
0.0195
-0.1747
0.0619
-0.1538
-0.0896
0.0758
-0.0461
0.0011
-0.0787
-0.0877
-0.2203
-0.0431
0.0743
-0.1289
-0.0960
-0.0841
0.0581
-0.1059
-0.1513
-0.0834
0.0357
-0.0917
0.0641
0.0595
-0.1210
-0.0276
0.0376
-0.0100
-0.1370
-0.0962
-0.2814
-0.1033
-0.0437
-0.0338
-0.0591
-0.0691
-0.0883
-0.0485
0.0748
-0.0378
-0.0478
0.0189
-0.0546
-0.0145
0.0332
-0.0593
0.0225
-0.1374
-0.1181
0.0559
0.0222
-0.0021
-0.0954
-0.0417
0.0799
-0.1447
-0.0297
-0.0955
0.0598
0.0732
-0.0074
0.0402
-0.0222
0.0747
0.0112
0.1270
-0.0274
-0.0054
0.0149
-0.0263
-0.0373
-0.0971
0.0749
-0.1377
-0.1877
-0.0638
-0.1487
-0.0099
-0.0275
0.0011
-0.0404
0.0556
-0.1120
-0.1673
0.0402
-0.1795
0.0676
0.0220
-0.0813
0.0919
-0.0402
0.0192
0.0033
-0.0220
-0.1673
-0.1087
-0.1025
0.0476
-0.1374
0.0058
-0.0772
-0.0445
0.0144
-0.1149
[torch.FloatTensor of size 256]
), (u'layer3.0.downsample.1.running_var',
1.00000e-02 *
1.3212
1.3382
0.6095
2.3173
1.0613
1.3040
1.8071
2.2225
0.5863
0.8064
0.9384
2.0050
1.3193
4.3016
1.6614
0.8651
1.6540
2.5693
0.8004
0.5019
2.7886
0.3852
0.7347
1.5154
2.1317
2.9001
2.1060
0.6044
1.1943
1.3458
1.3165
2.4103
1.0533
1.7815
1.4061
2.1802
0.9886
1.1141
1.2210
1.3257
1.8708
1.6071
1.4969
1.2958
1.1608
0.9993
0.6997
2.5080
1.0482
1.4110
1.8211
2.3854
2.4584
2.0267
1.9324
1.6686
2.3925
1.3504
1.9314
2.4688
2.2756
1.3571
1.4856
2.4605
2.7555
2.8775
0.8696
0.6599
1.0593
1.0700
0.9298
1.9956
2.2161
2.2084
1.6220
1.1626
1.5926
0.7989
1.0761
1.2732
2.0618
1.4437
1.5537
1.8481
2.0876
0.9675
0.4506
1.4136
0.8490
1.2861
1.2262
3.4415
1.4798
0.5757
0.8053
0.8657
1.2911
2.3058
1.4168
0.9986
1.1963
1.5742
2.0729
0.9315
1.1655
1.1168
1.1475
1.6534
1.7462
0.9781
1.7381
1.0939
1.1043
1.3494
0.7770
0.5724
1.8667
1.0200
1.1307
0.8269
0.9879
1.1117
1.4385
2.3246
2.5138
0.9115
1.0578
0.7320
1.6562
1.4827
0.5685
3.4491
1.9896
2.0521
0.9075
1.2675
1.0924
2.6108
1.0890
0.6949
2.2735
0.7547
1.6364
1.0205
0.7436
1.3534
1.5288
2.5751
0.7504
0.9168
2.3543
0.7190
0.8317
1.2518
1.4798
1.1023
1.5361
0.4985
0.6109
0.9817
1.3473
1.6659
2.8802
0.3228
1.5283
1.3065
1.9141
1.0007
2.0190
1.4825
1.1234
0.6844
1.4365
0.9743
2.1293
1.5214
1.3259
0.6814
1.2939
0.6188
0.6421
0.7311
2.2736
0.8418
1.9729
1.0084
1.9774
2.2133
0.7438
1.1675
2.6822
0.6403
0.8460
1.2970
1.1952
0.9338
1.1365
1.6780
0.8185
1.0743
1.6519
2.0700
0.9490
1.9632
1.3310
1.1290
0.5067
1.5383
1.1103
2.8381
1.1821
1.1883
1.5355
2.1017
1.1177
1.0712
0.7522
1.4946
1.2895
0.5904
0.4068
2.0167
1.5424
1.0638
0.3425
1.1384
3.0723
0.7040
3.0110
1.8263
0.6956
0.5911
0.8097
1.8654
0.6678
1.8846
1.3750
0.6798
0.8424
1.5255
1.4950
1.3587
2.0014
1.5513
0.9465
0.5946
2.3052
1.8731
0.9167
0.9334
1.4370
1.1357
1.1465
1.0277
1.0530
1.7208
[torch.FloatTensor of size 256]
), (u'layer3.0.downsample.1.weight', Parameter containing:
0.0674
0.0514
0.0385
0.1692
0.0604
0.0460
0.1209
0.1110
0.0418
0.0387
0.0442
0.0707
0.0790
0.1094
0.0959
0.0544
0.1032
0.2190
0.0459
0.0372
0.1410
0.0587
0.0360
0.0955
0.1657
0.1024
0.1417
0.0580
0.0536
0.0716
0.0865
0.1110
0.0511
0.0515
0.0809
0.1154
0.0777
0.0449
0.0490
0.1056
0.1457
0.0744
0.0530
0.0600
0.1026
0.0486
0.0408
0.1312
0.0639
0.1062
0.0915
0.1476
0.0900
0.0742
0.1069
0.0776
0.1423
0.0495
0.0974
0.0661
0.1292
0.0548
0.1145
0.0950
0.0921
0.1579
0.0496
0.0236
0.0398
0.0935
0.0291
0.0653
0.0885
0.1190
0.1692
0.0692
0.1316
0.0606
0.0480
0.0654
0.1082
0.0624
0.1103
0.1106
0.1076
0.0400
0.0723
0.0947
0.0662
0.0464
0.0444
0.1727
0.0921
0.0345
0.0451
0.0374
0.0940
0.0818
0.0397
0.0452
0.0985
0.1095
0.1072
0.0506
0.0444
0.0755
0.0420
0.1046
0.1172
0.0447
0.0459
0.0409
0.0539
0.1036
0.0741
0.0311
0.1086
0.1746
0.0777
0.0689
0.1100
0.0489
0.1048
0.1097
0.1025
0.0448
0.0675
0.0707
0.1364
0.0438
0.0346
0.1769
0.0667
0.1155
0.0628
0.0873
0.0406
0.2890
0.0703
0.0428
0.1173
0.1049
0.0611
0.0469
0.0400
0.0744
0.1003
0.1012
0.0599
0.1078
0.1512
0.0322
0.0430
0.0977
0.0951
0.0838
0.0958
0.0448
0.0263
0.0425
0.1154
0.0771
0.1781
0.0300
0.0699
0.0724
0.1600
0.0893
0.1130
0.0534
0.1359
0.0375
0.0809
0.1145
0.1232
0.0942
0.0880
0.0346
0.0996
0.0461
0.0694
0.0630
0.1590
0.0509
0.1254
0.0590
0.0744
0.1084
0.0514
0.0931
0.0848
0.0240
0.0279
0.0993
0.0612
0.0599
0.1095
0.0508
0.0658
0.1162
0.0833
0.1651
0.0505
0.1231
0.1228
0.1038
0.0369
0.0756
0.0415
0.1192
0.0292
0.0839
0.0577
0.0951
0.0944
0.0309
0.0390
0.0604
0.0672
0.0501
0.0383
0.0946
0.0958
0.0501
0.0243
0.1074
0.1908
0.0693
0.1376
0.1151
0.0329
0.0647
0.0616
0.1106
0.0358
0.0721
0.0851
0.0375
0.0368
0.0947
0.0464
0.1666
0.1049
0.0755
0.0398
0.0249
0.1528
0.1167
0.0886
0.0540
0.0726
0.0736
0.0797
0.0854
0.0609
0.1263
[torch.FloatTensor of size 256]
), (u'layer3.0.downsample.1.bias', Parameter containing:
-0.0264
0.0995
-0.0068
-0.0877
0.0078
0.0407
-0.0307
0.0060
0.0017
0.0478
0.0630
0.0358
-0.0504
0.0214
-0.0090
-0.0337
-0.0455
-0.1924
-0.0676
0.0775
-0.0340
-0.0799
0.1314
-0.1273
-0.0628
-0.0055
-0.0915
-0.1757
-0.0083
-0.0945
0.0025
-0.0319
-0.0158
0.1437
-0.0035
0.0108
-0.0511
0.0358
0.0878
-0.0452
-0.0458
0.0147
0.0687
0.0168
-0.0477
0.0568
0.0460
-0.0507
0.0059
-0.1034
0.0103
-0.1052
-0.0166
-0.0192
-0.0345
0.0201
-0.1362
0.0396
-0.0088
-0.0108
-0.0298
0.0721
-0.0669
-0.0094
-0.0310
-0.0267
-0.1418
0.1190
0.0669
-0.2137
0.0427
0.0478
0.0339
0.0001
-0.1482
-0.0237
-0.0743
-0.0684
-0.0201
0.0147
-0.0396
0.0194
-0.0696
-0.0558
0.0080
0.0236
-0.2578
0.0064
-0.1004
0.0280
0.0152
-0.0484
-0.1536
0.1049
0.0499
0.0657
-0.0541
0.0077
0.0941
-0.0200
-0.2356
-0.0623
0.0334
0.1102
0.0770
-0.0325
0.0481
-0.1499
-0.1650
0.1230
0.0712
0.0589
-0.0482
-0.0972
-0.1860
0.0853
-0.0516
-0.3080
-0.0604
-0.0771
-0.2728
0.0289
-0.1328
0.0173
-0.0392
0.0542
-0.0372
-0.1528
-0.1766
0.0839
0.0693
-0.0826
0.1118
-0.0508
-0.0448
-0.0375
0.0304
-0.3782
0.0149
0.0068
-0.0521
-0.2950
0.0899
0.0296
0.0199
-0.0835
-0.0964
-0.0238
0.0349
-0.2663
-0.1618
0.0736
0.0276
-0.1109
-0.0103
-0.0975
0.0140
0.0108
0.0784
0.0131
-0.0395
0.0248
-0.0774
-0.0284
0.0104
-0.0423
-0.1663
-0.0949
-0.0343
0.0455
-0.3000
-0.0069
0.0141
-0.2615
-0.0736
-0.1063
-0.0105
-0.0712
-0.1034
-0.0298
-0.1428
-0.0517
-0.0571
-0.0544
-0.0423
-0.0085
0.0159
-0.0654
-0.0613
-0.1450
0.0399
0.0816
-0.0078
-0.0341
0.0320
-0.0448
-0.0703
0.1021
-0.1799
-0.2117
-0.0598
-0.1160
0.0393
-0.0454
-0.1845
-0.1085
0.0558
-0.0636
0.0168
0.0002
0.0799
-0.0672
0.0798
-0.0040
-0.1902
0.0200
0.0732
0.1032
-0.0264
0.0240
-0.0442
0.0229
0.0234
-0.0235
0.0105
-0.2149
-0.1281
-0.0183
-0.0006
-0.0516
0.0566
-0.0543
0.0141
-0.0499
0.0673
0.0517
-0.0040
0.0351
0.0828
0.0100
0.0592
-0.2043
-0.0762
0.0414
0.0775
0.0760
-0.1592
-0.0836
-0.1663
0.0023
-0.0685
0.0381
-0.0987
-0.0203
0.0154
-0.1055
[torch.FloatTensor of size 256]
), (u'layer3.1.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
4.8367e-02 4.8045e-02 3.8471e-02
4.9888e-02 5.5208e-02 5.6701e-02
2.4192e-02 1.3436e-02 2.4655e-02
( 0 , 1 ,.,.) =
-3.6542e-03 -3.1100e-03 4.9227e-03
-1.2114e-03 3.4020e-03 1.9846e-02
-2.1704e-02 -2.1158e-02 -2.8686e-03
( 0 , 2 ,.,.) =
-1.2536e-02 -2.0486e-02 -2.3154e-02
-1.3515e-02 -2.3781e-02 -2.5515e-02
1.0584e-02 7.2999e-03 -5.2329e-03
...
( 0 ,253,.,.) =
-4.3596e-02 -1.8328e-02 -5.0577e-02
1.6590e-02 5.0719e-02 2.1919e-02
-1.9203e-02 -8.8315e-03 -2.0335e-02
( 0 ,254,.,.) =
-7.6949e-03 -1.5848e-02 1.5841e-03
-6.2470e-03 -1.3135e-02 6.9092e-03
-3.3791e-03 1.7889e-03 3.7373e-03
( 0 ,255,.,.) =
-6.6310e-03 5.8503e-03 -5.8571e-04
-2.4600e-02 -8.9747e-03 -7.2466e-03
-1.7566e-02 -8.5829e-03 -7.5220e-03
⋮
( 1 , 0 ,.,.) =
-2.3679e-02 -9.4399e-03 -1.1688e-02
-2.4777e-02 -1.7326e-02 -3.1489e-02
-3.3683e-03 9.7571e-03 -5.1527e-03
( 1 , 1 ,.,.) =
-3.0809e-02 -4.0685e-02 -2.2731e-02
-5.1065e-03 -1.6457e-02 -1.8804e-02
5.0382e-02 5.2054e-02 3.9185e-02
( 1 , 2 ,.,.) =
-3.7790e-02 -4.2234e-02 -2.9703e-02
-6.4766e-03 2.6967e-03 -8.1736e-03
3.7747e-02 5.5416e-02 2.5806e-02
...
( 1 ,253,.,.) =
-2.7275e-02 -4.5364e-02 -3.9567e-02
8.9827e-03 1.6150e-02 1.1675e-02
-9.7209e-03 -3.6449e-02 -1.6842e-02
( 1 ,254,.,.) =
1.7824e-02 1.5013e-02 1.0225e-02
5.4044e-03 1.1664e-02 6.4623e-03
2.1803e-02 4.1795e-02 1.9234e-02
( 1 ,255,.,.) =
-2.6730e-04 1.5218e-03 -5.0352e-03
2.5761e-02 2.7110e-02 -9.3395e-04
-1.1949e-02 -7.5204e-03 -3.9370e-03
⋮
( 2 , 0 ,.,.) =
-1.7447e-02 -1.8358e-02 -2.6020e-02
-1.4074e-02 -1.1302e-02 -1.4814e-02
-3.1460e-03 -1.8674e-02 -9.3350e-03
( 2 , 1 ,.,.) =
-5.1125e-03 -4.8036e-03 1.8139e-02
-1.0524e-02 -1.5152e-02 2.3904e-03
8.7093e-03 9.3810e-03 2.4203e-03
( 2 , 2 ,.,.) =
-7.6392e-03 -8.1496e-03 -1.5331e-02
-8.0622e-03 -1.3383e-02 -1.3938e-02
-1.6904e-02 -3.0059e-02 -1.8659e-02
...
( 2 ,253,.,.) =
1.8390e-02 -2.6080e-03 9.3782e-03
-6.4662e-04 -1.3146e-02 1.0045e-02
-2.2293e-03 -1.4097e-02 1.7385e-02
( 2 ,254,.,.) =
3.0293e-04 2.9622e-03 1.0030e-02
-5.7588e-03 -1.6943e-03 6.9988e-03
9.8134e-03 1.4197e-02 5.9742e-03
( 2 ,255,.,.) =
2.8753e-03 -1.7814e-03 1.0873e-02
1.5230e-02 4.5867e-03 1.6860e-02
1.9536e-03 1.9503e-02 1.2168e-02
...
⋮
(253, 0 ,.,.) =
1.3983e-02 2.4598e-03 -7.4604e-03
-2.2250e-02 -1.2757e-02 -2.8846e-03
-1.0911e-02 7.5499e-03 8.6910e-03
(253, 1 ,.,.) =
-4.8463e-03 -8.3250e-03 1.3420e-02
-6.2502e-03 -7.3982e-03 1.1153e-02
4.0391e-03 -9.0354e-03 -7.5441e-03
(253, 2 ,.,.) =
-5.1627e-03 -8.9529e-03 -1.2414e-02
-4.9261e-03 -3.5488e-03 2.1501e-03
-1.1709e-02 -1.4984e-02 -1.9216e-03
...
(253,253,.,.) =
1.5428e-02 -7.6036e-04 -1.3522e-03
-3.4856e-02 -7.4478e-04 -6.5064e-03
-9.1655e-03 -2.8467e-02 -4.8924e-02
(253,254,.,.) =
1.2207e-02 1.0519e-02 -8.4421e-03
-2.5495e-02 2.8140e-03 1.6165e-03
-1.8831e-02 1.2268e-02 1.5439e-02
(253,255,.,.) =
-1.3684e-02 -4.1732e-03 1.2609e-02
-6.8834e-04 5.9757e-03 -1.0183e-02
2.1559e-04 -1.3462e-02 -3.0114e-02
⋮
(254, 0 ,.,.) =
-1.6186e-02 -6.4926e-02 -4.3146e-02
-2.1790e-02 -4.9106e-02 -3.4568e-02
4.0506e-02 4.2449e-02 6.1562e-02
(254, 1 ,.,.) =
3.5715e-03 -1.0916e-02 -2.2922e-02
-2.4831e-03 6.4555e-03 -1.1316e-02
1.6662e-03 -1.9145e-02 -2.3007e-02
(254, 2 ,.,.) =
-7.1243e-03 -4.2783e-05 4.9363e-03
-1.5832e-02 4.0474e-03 4.5135e-04
-4.7967e-03 -7.2164e-04 -1.7230e-02
...
(254,253,.,.) =
1.1589e-02 7.7814e-04 6.3205e-03
1.1360e-02 -6.2076e-03 -2.7689e-02
2.6392e-02 2.3775e-03 -1.4937e-02
(254,254,.,.) =
-1.1237e-02 -2.6285e-03 9.1537e-03
-8.2120e-03 -2.2236e-02 3.2917e-04
5.5909e-03 -1.3858e-03 6.8947e-03
(254,255,.,.) =
-1.4783e-02 -1.0367e-02 -2.7472e-02
-4.1090e-02 -3.8532e-02 -3.9202e-02
-2.1614e-02 -3.4340e-02 -1.8542e-02
⋮
(255, 0 ,.,.) =
-1.9492e-02 -1.6098e-02 -3.1792e-02
2.5374e-02 4.6815e-02 2.7513e-02
3.5903e-02 3.1892e-02 2.6156e-02
(255, 1 ,.,.) =
1.6856e-02 1.5645e-02 1.4189e-02
2.2550e-02 3.0456e-02 1.6739e-02
-2.3615e-04 -7.9501e-03 -1.9666e-03
(255, 2 ,.,.) =
-7.9060e-03 -4.7390e-03 1.6030e-03
1.3802e-03 -8.5837e-03 6.9451e-03
1.1407e-02 -5.9877e-03 1.3759e-02
...
(255,253,.,.) =
4.0124e-03 2.9951e-02 1.1915e-02
-4.3412e-02 -3.1776e-03 -2.7705e-02
-1.6183e-02 -1.1247e-02 -3.5084e-02
(255,254,.,.) =
2.9837e-02 5.9935e-02 2.4631e-02
-1.9571e-03 2.2415e-02 -1.5499e-02
1.6075e-02 1.7850e-02 -1.8412e-02
(255,255,.,.) =
-4.3712e-03 -4.9032e-02 -2.1335e-02
-5.2598e-03 -2.8579e-02 -2.2090e-02
8.5126e-03 2.0862e-03 2.3301e-02
[torch.FloatTensor of size 256x256x3x3]
), (u'layer3.1.bn1.running_mean',
-0.1025
-0.2592
-0.0965
-0.3407
-0.7097
-0.6031
-0.2141
-0.9031
-0.4035
-0.6407
-0.2497
-0.3583
-0.4565
-0.6490
-0.5170
-0.1349
-0.4850
-0.4739
-0.3153
-0.8209
-0.4225
-0.6206
-0.4559
-0.3368
-0.4792
-0.1458
-0.2748
-0.3980
-0.7130
-0.8394
-0.2738
-0.2684
-0.7559
-0.1212
-0.4390
-0.6971
-0.5034
-0.5667
-0.0881
-0.5308
-1.0779
-0.4657
-0.1016
-0.3251
0.5384
-0.7573
-0.4718
-0.3475
-0.7198
0.0478
-0.1387
-0.8780
-0.4057
-0.1151
0.0992
-0.3888
-1.0015
-0.3866
-0.3267
-0.7324
0.0908
-0.2760
-0.2759
-0.1892
-0.5036
-0.7406
-0.5314
-0.4804
-0.6063
-0.0027
-0.9185
-0.2444
-1.1849
-0.5390
0.0826
-0.5177
-0.2814
-0.9467
-0.3946
-0.2202
-0.9325
-0.2205
-0.5632
-0.3165
-0.3471
-0.5694
-0.5109
-0.5890
-0.7838
-0.0023
-0.2396
-0.1672
-0.8411
-0.7307
-0.7261
-0.9349
-0.3716
-0.3562
-0.5137
0.0200
-0.1683
-0.5633
-0.7860
0.0991
-0.4193
-0.2072
0.3579
-0.4102
-0.3668
-0.4049
-0.9005
-0.2777
-0.2725
0.3941
-0.5075
-0.4530
-0.1478
-0.0221
-0.8574
-0.3104
-0.3454
-0.6428
-0.2709
-0.8565
-0.6260
0.0389
-0.2254
-0.0605
-0.5984
-0.3643
-0.3485
0.0256
-0.6499
-0.3053
-0.2398
-0.3982
-0.7215
-0.6537
0.0768
-0.4554
-0.5362
-0.1021
-0.6464
-0.2658
-0.4985
-0.4319
-0.3855
-0.3943
-0.5918
-0.2335
-0.6772
-0.5162
0.3806
-0.3896
-0.7292
-0.2207
-0.1687
-0.6623
0.2850
0.1726
-0.1974
-0.5586
-0.2524
0.1773
-0.6096
-0.2281
-0.4691
-0.8133
0.0092
-0.4920
-0.1768
0.1288
-0.0693
-0.8397
0.2613
-0.2188
-0.3190
-0.4868
-0.6609
-0.7675
-0.7933
-0.9342
-0.0691
-0.6616
-0.3705
-0.4538
-0.9279
0.3324
-0.4950
0.2669
-0.8139
-0.7909
-0.7892
-0.4672
-0.5917
-0.5463
-0.2896
-0.0226
-0.2722
-0.3747
0.1202
-0.0338
0.1891
-0.2435
-0.0226
-0.7646
-0.4016
-0.3116
1.5396
-0.1373
-0.7532
-0.6100
-0.2439
-0.5319
-0.6603
-0.3199
-0.8471
-0.0484
0.0830
-0.4584
-0.3889
-0.8163
-0.3322
-0.6670
-0.7744
-0.7010
0.2128
-0.5624
-1.0360
-0.3098
0.7000
-0.3580
-0.3484
-0.5161
-0.2167
-0.8903
-0.3402
-0.4037
-0.5501
-0.4197
-0.5290
-0.4974
0.0083
-0.0166
-0.8607
-0.5581
-0.5161
-0.3668
-0.7363
-0.3654
-0.2788
-0.3890
-0.1521
-0.3327
-0.7903
-0.2531
[torch.FloatTensor of size 256]
), (u'layer3.1.bn1.running_var',
0.2021
0.2649
0.0816
0.1141
0.1276
0.1230
0.1780
0.1393
0.1235
0.1178
0.1281
0.1505
0.1060
0.0954
0.1147
0.1260
0.0831
0.2214
0.1452
0.1260
0.1203
0.1507
0.1066
0.3038
0.1444
0.1266
0.1966
0.1145
0.1376
0.0991
0.1222
0.1343
0.1066
0.1148
0.1946
0.1589
0.1715
0.1588
0.1577
0.0995
0.1036
0.1305
0.1547
0.1623
0.1437
0.1624
0.0856
0.2183
0.1339
0.0807
0.1528
0.1277
0.1413
0.1200
0.2567
0.1202
0.1523
0.1513
0.1002
0.1453
0.1620
0.1270
0.1179
0.1004
0.2034
0.1578
0.1785
0.1181
0.0674
0.2460
0.1251
0.1144
0.1670
0.1460
0.1625
0.1203
0.1697
0.1065
0.1415
0.1694
0.0909
0.1133
0.1569
0.0880
0.1333
0.1711
0.2421
0.1188
0.0882
0.1084
0.1373
0.2886
0.1736
0.1740
0.1512
0.1086
0.1211
0.1523
0.1453
0.1735
0.1515
0.1348
0.2445
0.1433
0.1422
0.1520
0.0985
0.1292
0.1372
0.2228
0.1265
0.1538
0.1600
0.1121
0.1922
0.1195
0.1100
0.1151
0.1431
0.1258
0.1416
0.1049
0.1840
0.1158
0.1111
0.2187
0.1193
0.1541
0.1074
0.1350
0.1385
0.0990
0.1418
0.1837
0.1667
0.1712
0.1567
0.1542
0.1501
0.1585
0.1422
0.1527
0.1179
0.1882
0.1856
0.1549
0.1798
0.2879
0.1156
0.1749
0.1297
0.1522
0.1308
0.2123
0.1579
0.0937
0.1310
0.2052
0.1510
0.1542
0.1416
0.1203
0.1372
0.1980
0.1352
0.2065
0.1385
0.1358
0.1696
0.2816
0.1058
0.0886
0.1123
0.2269
0.1117
0.1080
0.2029
0.1026
0.1150
0.1452
0.1180
0.1690
0.2079
0.1133
0.1933
0.1454
0.0872
0.1002
0.1101
0.1757
0.1967
0.0907
0.1175
0.1508
0.1348
0.1203
0.1413
0.2189
0.1030
0.1387
0.1669
0.1090
0.1632
0.0875
0.1349
0.1074
0.2839
0.1628
0.1872
0.1538
0.0973
0.1831
0.2120
0.1716
0.0890
0.1869
0.0877
0.1300
0.2792
0.1565
0.1605
0.1427
0.1366
0.1441
0.1080
0.1166
0.1286
0.2491
0.1250
0.1469
0.1387
0.1164
0.1255
0.0991
0.0942
0.1549
0.2844
0.1235
0.1733
0.1828
0.0891
0.1104
0.1286
0.1495
0.1249
0.4629
0.1380
0.1015
0.1064
0.1612
0.2148
0.1442
0.1714
0.1601
0.2572
0.1879
[torch.FloatTensor of size 256]
), (u'layer3.1.bn1.weight', Parameter containing:
0.2480
0.1972
0.2279
0.2709
0.3296
0.2640
0.2710
0.3475
0.2388
0.2904
0.2769
0.3045
0.2268
0.2634
0.2999
0.2397
0.2724
0.2723
0.2133
0.3806
0.2767
0.2403
0.2406
0.2917
0.2675
0.2305
0.2394
0.3123
0.2984
0.3353
0.2234
0.1919
0.3168
0.2626
0.2901
0.2918
0.3455
0.2561
0.2434
0.2298
0.3318
0.3481
0.2032
0.2478
0.2478
0.2483
0.3252
0.2567
0.2685
0.1977
0.2541
0.4079
0.2480
0.2076
0.2276
0.2683
0.2098
0.2056
0.2010
0.3560
0.2384
0.3284
0.1952
0.2445
0.2848
0.3742
0.2746
0.2117
0.3859
0.4785
0.3005
0.2848
0.3762
0.2903
0.2126
0.1776
0.2778
0.3878
0.3123
0.1974
0.2679
0.2300
0.2474
0.2320
0.2635
0.2819
0.2296
0.3194
0.3814
0.2503
0.2269
0.2676
0.3431
0.3799
0.3787
0.2968
0.3021
0.2575
0.3007
0.1939
0.1950
0.3217
0.3623
0.2171
0.2486
0.2266
0.2133
0.2851
0.2715
0.2720
0.3107
0.2174
0.2675
0.2387
0.3434
0.2761
0.2084
0.2975
0.3178
0.2818
0.2858
0.3498
0.2675
0.2638
0.3159
0.2879
0.1873
0.2986
0.3584
0.2570
0.1815
0.2758
0.2640
0.2486
0.2567
0.2252
0.3420
0.2910
0.2898
0.2902
0.2404
0.2381
0.3633
0.2690
0.3810
0.2947
0.2743
0.4644
0.3133
0.2444
0.3477
0.3001
0.1977
0.2301
0.2513
0.2660
0.3271
0.1622
0.2274
0.2225
0.3596
0.3215
0.1997
0.2215
0.2706
0.2831
0.2621
0.3710
0.2730
0.2903
0.1893
0.2140
0.2460
0.3141
0.2424
0.3699
0.2364
0.2420
0.2948
0.2497
0.2760
0.2686
0.2895
0.3857
0.1398
0.2832
0.3362
0.2522
0.2823
0.2381
0.2311
0.3274
0.4078
0.2648
0.2525
0.3388
0.3251
0.2420
0.2856
0.3605
0.2603
0.2294
0.2483
0.2171
0.2353
0.4117
0.2588
0.2888
0.1972
0.2408
0.2755
0.3031
0.2457
0.2744
0.3564
0.2546
0.3673
0.2883
0.2590
0.3021
0.2890
0.3505
0.2092
0.2953
0.3222
0.2925
0.2574
0.3012
0.3893
0.2211
0.2226
0.3258
0.3205
0.2975
0.2323
0.3323
0.2812
0.2702
0.2300
0.2846
0.3318
0.2292
0.3498
0.2622
0.3581
0.4003
0.2924
0.3049
0.3478
0.2845
0.2742
0.2019
0.2466
0.2988
0.2044
0.2691
[torch.FloatTensor of size 256]
), (u'layer3.1.bn1.bias', Parameter containing:
-0.1332
-0.0644
-0.3239
-0.2390
-0.3262
-0.1796
-0.2087
-0.3208
-0.1874
-0.2988
-0.2099
-0.2283
-0.2141
-0.2460
-0.2768
-0.1351
-0.2498
-0.2393
-0.1223
-0.4590
-0.2172
-0.1220
-0.2101
-0.1779
-0.2426
-0.1546
-0.1549
-0.3716
-0.2817
-0.3886
-0.1545
-0.0687
-0.3412
-0.2261
-0.1961
-0.2242
-0.2984
-0.1381
-0.2251
-0.1658
-0.4534
-0.3226
-0.0977
-0.1349
-0.2619
-0.1428
-0.3960
-0.1633
-0.2101
-0.1161
-0.1448
-0.5502
-0.2179
-0.1246
0.0502
-0.1902
-0.1047
-0.1000
-0.1411
-0.3124
-0.2190
-0.3062
-0.1247
-0.1557
-0.2973
-0.3825
-0.1951
-0.1381
-0.5761
-0.3879
-0.2808
-0.2542
-0.3470
-0.2460
-0.1091
-0.0562
-0.1833
-0.4956
-0.3059
-0.0988
-0.2255
-0.1958
-0.1320
-0.1738
-0.2287
-0.1926
-0.0924
-0.3427
-0.5489
-0.2431
-0.1935
-0.1641
-0.2503
-0.3274
-0.4008
-0.2824
-0.2694
-0.1939
-0.2413
-0.0309
-0.0880
-0.3421
-0.3104
-0.1102
-0.1539
-0.1233
-0.1780
-0.2715
-0.2005
-0.1846
-0.2843
-0.1117
-0.1816
-0.2119
-0.3304
-0.2267
-0.1413
-0.3376
-0.2674
-0.2524
-0.2554
-0.4735
-0.2342
-0.2130
-0.3282
-0.1966
-0.1063
-0.2615
-0.4234
-0.1374
-0.0811
-0.3069
-0.1538
-0.1453
-0.1612
-0.1631
-0.3759
-0.2608
-0.2382
-0.2499
-0.1485
-0.1487
-0.4328
-0.1377
-0.2781
-0.2259
-0.2072
-0.4165
-0.3582
-0.1382
-0.3598
-0.2672
-0.2090
-0.0177
-0.1279
-0.2812
-0.3621
0.0476
-0.2232
-0.1272
-0.3237
-0.3008
-0.1119
-0.0839
-0.2426
-0.2000
-0.1873
-0.4685
-0.2000
-0.3462
-0.0706
-0.1973
-0.3548
-0.1975
-0.3537
-0.3546
-0.1433
-0.2052
-0.2722
-0.1528
-0.2798
-0.1945
-0.2474
-0.4910
0.1322
-0.2378
-0.5166
-0.3959
-0.2354
-0.1266
-0.0810
-0.4132
-0.5576
-0.2238
-0.1563
-0.3950
-0.3283
-0.0846
-0.3103
-0.3130
-0.1498
-0.1396
-0.0972
-0.1620
-0.1631
-0.6364
-0.1350
-0.2345
-0.1049
-0.1625
-0.2878
-0.2450
-0.1468
-0.2035
-0.5358
-0.1683
-0.5524
-0.2511
-0.1230
-0.2305
-0.1925
-0.3759
-0.1014
-0.1697
-0.4002
-0.2980
-0.3035
-0.1563
-0.4660
-0.1155
-0.1665
-0.3382
-0.2935
-0.3122
-0.3015
-0.3261
-0.2542
-0.2037
-0.0955
-0.2070
-0.4370
-0.2051
-0.4205
-0.3125
-0.4845
-0.3528
-0.2624
-0.2894
-0.3976
-0.2107
-0.1791
-0.1075
-0.1213
-0.3022
0.0516
-0.1928
[torch.FloatTensor of size 256]
), (u'layer3.1.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
-4.2568e-02 -2.6148e-02 -2.2019e-02
-1.7334e-02 -7.5950e-03 -7.2384e-03
-1.7876e-03 2.3800e-02 1.4873e-02
( 0 , 1 ,.,.) =
-2.8277e-03 -5.0644e-03 -4.9442e-03
1.2117e-03 1.4908e-02 1.6013e-02
1.4391e-02 3.3109e-02 5.0061e-02
( 0 , 2 ,.,.) =
-3.4891e-03 -4.4437e-03 2.6589e-03
1.5105e-02 2.6303e-02 2.6802e-02
3.9232e-02 5.0057e-02 4.6637e-02
...
( 0 ,253,.,.) =
2.2877e-02 1.5454e-02 -2.4483e-02
3.1145e-02 3.4944e-02 1.3296e-02
-1.7674e-04 7.3297e-03 -5.7174e-03
( 0 ,254,.,.) =
-2.1781e-02 -3.7379e-02 -1.3382e-02
1.8976e-02 1.4155e-02 -6.5395e-03
2.6831e-02 3.6354e-02 1.1450e-02
( 0 ,255,.,.) =
3.1603e-02 3.3933e-02 3.1575e-02
-1.0098e-02 -1.2657e-02 1.1674e-02
1.0325e-02 7.9424e-05 1.5911e-02
⋮
( 1 , 0 ,.,.) =
2.5937e-02 6.2590e-03 6.0798e-03
-4.5745e-03 -3.5188e-02 -2.9249e-02
2.1366e-02 2.0480e-03 6.2699e-03
( 1 , 1 ,.,.) =
-2.9549e-03 -1.3679e-03 -8.6876e-03
7.9988e-03 1.2888e-03 -5.9629e-03
-9.1481e-03 -2.1914e-02 -4.1572e-02
( 1 , 2 ,.,.) =
7.6390e-03 3.0253e-03 2.7817e-04
7.0329e-03 1.1914e-02 -2.4419e-03
-8.2131e-03 -9.7848e-05 -1.9223e-02
...
( 1 ,253,.,.) =
-4.4498e-03 5.1611e-03 3.7416e-03
3.2110e-04 8.3762e-03 3.6612e-03
9.3343e-03 8.1829e-03 1.1234e-03
( 1 ,254,.,.) =
-6.6849e-02 -5.9871e-02 -3.3931e-02
2.2337e-02 3.1932e-02 3.7244e-02
9.3296e-03 3.7222e-02 1.4052e-02
( 1 ,255,.,.) =
-2.0643e-03 1.2408e-02 -3.1072e-03
-8.2882e-03 1.3917e-02 -2.0680e-02
-1.9329e-02 1.1953e-02 -2.3436e-02
⋮
( 2 , 0 ,.,.) =
-1.7788e-03 -3.5982e-03 -1.2592e-03
-1.5320e-02 -1.0690e-02 -2.0311e-02
-3.4649e-04 -2.2188e-03 -1.5021e-02
( 2 , 1 ,.,.) =
-2.8952e-02 -3.3958e-02 -2.5437e-02
-1.5919e-04 1.5204e-02 3.4554e-02
3.6892e-02 7.0144e-02 7.3610e-02
( 2 , 2 ,.,.) =
1.0721e-02 2.1531e-03 -5.6155e-03
1.1754e-02 -4.8546e-03 -5.5013e-03
-3.7388e-04 -9.7639e-03 -1.5029e-02
...
( 2 ,253,.,.) =
1.5622e-02 9.8976e-03 3.4725e-03
1.4711e-02 7.0707e-03 -9.1826e-03
7.0986e-03 6.3087e-03 -3.5893e-03
( 2 ,254,.,.) =
-6.4518e-03 -6.7673e-03 1.1635e-02
1.4707e-02 2.3831e-02 4.9396e-02
1.8897e-02 3.4981e-02 4.5488e-02
( 2 ,255,.,.) =
1.5900e-02 3.3369e-02 2.6194e-02
1.0616e-02 1.8515e-02 3.0190e-03
1.1004e-02 2.5503e-02 1.3654e-02
...
⋮
(253, 0 ,.,.) =
-2.1231e-02 -1.2804e-02 -1.5498e-02
7.6750e-03 1.2120e-02 1.5099e-02
1.8536e-02 2.5110e-02 2.5283e-02
(253, 1 ,.,.) =
7.4059e-03 -3.0540e-03 -1.5475e-03
-8.4415e-03 -2.2002e-02 -3.4099e-03
9.1918e-03 2.2617e-03 -1.4260e-02
(253, 2 ,.,.) =
-5.2568e-03 -5.3507e-03 -3.2230e-03
-1.5805e-02 6.0508e-03 -1.5917e-03
-8.9323e-03 2.6483e-03 5.0508e-03
...
(253,253,.,.) =
1.9826e-02 -2.1209e-03 1.4889e-02
5.7275e-02 3.5549e-02 6.0175e-03
2.3347e-02 -2.2153e-02 -2.5497e-02
(253,254,.,.) =
-1.3985e-02 -6.4766e-02 -1.7286e-02
1.1704e-02 1.0714e-02 4.6278e-02
-1.0038e-02 -3.5707e-03 2.2691e-02
(253,255,.,.) =
-8.3342e-03 -1.3070e-03 -1.0049e-02
3.2605e-02 5.3259e-02 2.2172e-02
3.7339e-02 6.1155e-02 4.4555e-02
⋮
(254, 0 ,.,.) =
-1.6584e-02 -1.3850e-02 -1.4604e-02
-1.7604e-02 -2.1268e-02 -1.6734e-02
-6.0039e-04 3.8569e-03 1.2837e-02
(254, 1 ,.,.) =
1.7623e-02 2.3706e-02 2.7633e-02
-2.2841e-02 -1.9576e-02 -1.6551e-02
-8.0822e-03 4.3779e-03 -5.3622e-03
(254, 2 ,.,.) =
1.5582e-02 3.7879e-02 2.3555e-02
-6.4632e-03 9.8620e-03 1.2121e-02
-1.3743e-02 -6.1246e-03 -2.7332e-03
...
(254,253,.,.) =
-2.5037e-03 -1.2064e-02 -9.0989e-03
-4.7911e-04 -2.8339e-03 2.1365e-03
-6.2077e-03 -2.6615e-03 1.1215e-02
(254,254,.,.) =
-8.1794e-03 -2.2417e-02 -3.4012e-02
-2.8553e-02 -2.9546e-02 -4.4372e-02
-5.0348e-02 -3.4973e-02 -5.2028e-02
(254,255,.,.) =
1.4728e-02 3.2834e-02 2.6312e-02
1.3449e-02 2.6407e-02 2.6924e-02
2.5572e-02 3.4316e-02 2.6184e-02
⋮
(255, 0 ,.,.) =
7.2026e-03 -2.3931e-03 2.2182e-03
4.2555e-03 -6.4084e-03 7.8548e-03
2.0510e-02 1.8644e-02 2.3280e-02
(255, 1 ,.,.) =
-1.2471e-02 1.3008e-02 1.0010e-02
-1.7496e-03 6.1331e-03 4.3366e-03
5.2269e-03 1.5111e-02 -8.1881e-03
(255, 2 ,.,.) =
-3.7337e-02 1.9923e-02 -2.4149e-02
-4.9487e-02 -1.0510e-02 -4.2107e-02
-5.7684e-03 -4.8632e-03 -1.8332e-02
...
(255,253,.,.) =
7.2013e-03 -1.5208e-02 -1.6507e-02
-8.8276e-03 -1.8698e-02 -1.6637e-03
-1.2015e-02 2.9667e-03 6.2300e-03
(255,254,.,.) =
-1.8341e-02 -9.0521e-03 2.6030e-02
3.5930e-02 5.3049e-02 5.8487e-02
-1.3661e-02 -3.6888e-03 -7.1606e-03
(255,255,.,.) =
-1.2594e-02 -4.0898e-02 1.7162e-03
-1.7420e-02 -4.3435e-02 -1.3183e-02
-3.7506e-02 -5.5707e-02 -3.0051e-02
[torch.FloatTensor of size 256x256x3x3]
), (u'layer3.1.bn2.running_mean',
-0.0823
-0.0332
-0.0266
-0.0132
-0.0638
-0.1047
-0.0671
-0.0530
0.0623
-0.0001
-0.0130
0.0178
-0.0493
-0.0653
-0.0985
0.2112
-0.0922
-0.1139
-0.1879
-0.0822
-0.0585
-0.5599
-0.0512
-0.1987
-0.0715
-0.0665
-0.1684
-0.1031
0.1054
-0.2306
-0.0339
-0.0807
-0.0035
0.0472
-0.0788
-0.1265
-0.0615
0.0643
0.0278
0.0031
-0.0767
-0.0951
0.0001
-0.0472
-0.0309
-0.0464
-0.0633
-0.0814
-0.0705
-0.0671
-0.0792
-0.1778
0.0551
0.4111
0.0016
-0.0799
-0.0692
-0.0498
0.0038
0.0025
0.0356
-0.0258
-0.1225
-0.0464
-0.1216
-0.0643
-0.1383
-0.0232
-0.0181
-0.1470
0.0488
-0.0557
-0.0348
-0.0566
-0.0651
-0.1047
-0.0526
-0.1851
-0.1060
0.0265
0.0717
-0.0200
-0.1123
-0.2006
-0.1032
0.0041
-0.2242
-0.1273
-0.0887
0.1082
-0.0589
-0.0834
-0.1046
0.0230
-0.0885
0.0335
-0.1107
-0.0092
-0.0460
-0.1370
-0.2022
0.0463
-0.0197
-0.0119
-0.0600
-0.0751
-0.0727
-0.0911
-0.0921
-0.0580
-0.0364
-0.0447
-0.1709
-0.1368
-0.1476
-0.1476
-0.1123
-0.2308
-0.0906
-0.2593
-0.2083
-0.0634
-0.2680
-0.1612
-0.1003
-0.1318
-0.0904
-0.0967
-0.1973
0.0535
-0.0278
-0.0744
-0.0563
-0.0591
-0.0241
-0.1510
0.0022
-0.1889
-0.1289
-0.0196
-0.0728
-0.1840
-0.0609
-0.0200
-0.4031
-0.0734
-0.0960
-0.0865
-0.0683
-0.1690
-0.0987
0.0124
-0.0443
-0.1937
-0.1329
-0.1207
-0.1423
0.1048
-0.0854
0.0638
-0.0578
-0.0865
0.0336
-0.3877
-0.0574
-0.0753
-0.2072
-0.1827
-0.1238
-0.2206
-0.1645
-0.0293
-0.1309
-0.1585
-0.0439
-0.1387
-0.0617
-0.0198
-0.1220
-0.0763
-0.3562
0.3413
-0.0658
-0.1264
-0.1754
0.0417
-0.0362
-0.1289
-0.2704
-0.2141
0.0194
0.0131
-0.0095
-0.0603
-0.0342
-0.0799
-0.3172
-0.0341
-0.1697
-0.1221
-0.3093
-0.1225
-0.0283
-0.1518
-0.0933
-0.2834
-0.0030
-0.0359
0.0030
-0.0460
-0.0855
-0.0382
-0.0174
-0.0826
-0.1509
-0.1769
-0.1501
0.0015
-0.0877
-0.0333
-0.2545
-0.0130
-0.0469
-0.0884
-0.0899
-0.1911
-0.1313
-0.0350
-0.1149
-0.1889
-0.0409
-0.3819
-0.0897
-0.1702
-0.1546
0.0307
-0.0409
-0.0370
-0.0691
-0.0420
0.0535
-0.1312
-0.1199
-0.0578
-0.0546
-0.1234
-0.1162
-0.1232
-0.2521
-0.0410
-0.1056
-0.0522
-0.2415
-0.0947
-0.1564
-0.1230
[torch.FloatTensor of size 256]
), (u'layer3.1.bn2.running_var',
1.00000e-02 *
1.8530
2.6394
1.2856
1.2689
1.4751
1.5978
1.2460
1.8145
1.9383
1.1628
0.7526
1.1219
3.4934
1.7682
2.0756
2.5356
2.3623
1.9827
2.2904
3.4381
1.9890
5.3809
3.1563
1.9644
1.2794
1.9174
1.9996
2.8697
1.8158
2.7271
1.1315
1.1837
2.1068
0.8485
1.6900
1.5894
1.2585
1.5643
0.9987
0.8934
1.8547
1.5648
0.8220
1.3122
1.1694
1.0160
1.0536
2.4400
1.3983
1.6112
1.1215
1.6260
2.9490
3.5517
1.4957
2.0074
1.7436
1.1895
2.0108
3.5077
1.8778
1.2123
1.3372
2.3718
2.4176
1.7590
2.8278
1.4679
1.0504
1.6922
2.4693
1.8485
1.0362
1.6711
1.3731
2.2465
1.1501
2.4804
1.2025
1.2570
2.1939
1.6975
1.7424
2.1701
1.7643
1.9817
2.3719
1.2252
2.7208
6.0497
1.1410
1.2758
1.9986
1.9867
2.2465
1.9493
1.6528
2.3030
1.8762
2.0904
2.1493
2.4125
1.2632
1.4381
1.2692
0.9812
1.6465
1.6313
4.0332
1.5335
1.4138
0.5016
3.3883
1.5893
2.2616
1.2452
1.7113
4.5386
1.6545
3.1452
4.1772
2.2630
2.4911
1.6146
1.9939
1.0476
2.7228
1.4376
2.7456
1.1228
0.9946
0.9961
2.0141
1.8478
0.9278
2.0743
0.8191
2.8920
1.5118
1.3835
1.6313
2.1079
1.2747
1.3377
4.0637
2.5386
2.1835
1.5742
0.8617
1.8719
3.4042
0.9114
0.6896
2.1932
2.0435
3.3022
2.0678
1.2721
4.0421
1.4042
1.4963
1.6277
4.7124
5.3335
1.4413
1.6945
2.4771
2.2726
1.6246
3.9784
2.5942
0.6863
1.6802
2.0793
1.8885
2.3745
1.5412
2.2586
1.8481
2.0519
2.9992
5.0411
1.8868
1.7426
1.4016
1.8388
2.3564
4.0373
6.9144
2.7295
2.0721
1.6795
2.3665
1.8622
1.4306
1.1317
4.2427
1.4678
2.1902
4.3384
4.4179
1.1020
1.7952
1.4674
1.5806
2.5305
2.3667
2.1030
1.1405
1.9686
1.1575
1.3539
0.9093
2.0994
2.3689
2.5597
3.3564
1.6578
1.5100
1.3169
2.9548
2.6140
1.6542
1.3860
1.1822
2.0687
1.8049
0.9810
1.6161
3.9758
1.1737
3.2446
1.3650
2.2535
1.7262
1.2099
0.9317
1.1607
1.1387
1.9526
1.4283
1.0255
2.0608
1.1115
1.6965
1.3392
2.2404
2.1118
3.9554
0.7481
2.4169
1.2689
3.6180
1.6522
1.8249
1.6171
[torch.FloatTensor of size 256]
), (u'layer3.1.bn2.weight', Parameter containing:
0.1971
0.1771
0.1303
0.1995
0.1839
0.0934
0.2333
0.2236
0.1654
0.1280
0.0842
0.1085
0.3168
0.2032
0.3246
0.2184
0.3208
0.2824
0.3408
0.3339
0.3307
0.5571
0.2821
0.3081
0.2114
0.2971
0.2361
0.5500
0.1221
0.3381
0.1528
0.1544
0.1982
0.0582
0.1812
0.2489
0.1954
0.0705
0.0918
0.1328
0.2616
0.2013
0.0720
0.1573
0.1919
0.0813
0.1170
0.2504
0.2863
0.3032
0.1476
0.3696
0.1870
0.2097
0.1907
0.2364
0.1642
0.1079
0.2531
0.1703
0.1266
0.0814
0.2407
0.2609
0.2705
0.2128
0.5007
0.2375
0.0802
0.2896
0.1776
0.0887
0.1094
0.1834
0.2812
0.1971
0.2021
0.3443
0.1411
0.1362
0.2676
0.1618
0.2723
0.2727
0.2528
0.0982
0.4707
0.2239
0.3649
0.1987
0.0815
0.2543
0.3322
0.1561
0.2336
0.1294
0.2570
0.1700
0.1374
0.2215
0.5015
0.3132
0.1487
0.1174
0.0916
0.2130
0.1393
0.3057
0.5634
0.1018
0.0994
0.0492
0.4427
0.3142
0.4002
0.1334
0.2174
0.5522
0.2806
0.2784
0.4333
0.2602
0.3788
0.1827
0.2664
0.1077
0.3001
0.2428
0.5130
0.0829
0.1254
0.1996
0.1451
0.2253
0.1467
0.3712
0.0794
0.5425
0.2058
0.2103
0.1288
0.4993
0.1815
0.1845
0.4154
0.3817
0.2054
0.2205
0.1471
0.4964
0.4202
0.0801
0.0623
0.3536
0.2760
0.3840
0.1632
0.1402
0.2674
0.0844
0.2305
0.2259
0.2146
0.4181
0.2821
0.2926
0.3416
0.4640
0.3025
0.3732
0.5871
0.0616
0.2797
0.3042
0.2173
0.3550
0.2096
0.2449
0.3428
0.2868
0.3543
0.4667
0.3220
0.3805
0.2632
0.2160
0.1924
0.4074
0.4966
0.3623
0.1670
0.1321
0.2374
0.2118
0.1522
0.1668
0.3836
0.0983
0.3729
0.3943
0.4353
0.2270
0.1508
0.3133
0.3850
0.5774
0.1892
0.2822
0.0907
0.2364
0.0964
0.2360
0.0699
0.2938
0.5100
0.3348
0.2339
0.1145
0.2155
0.2266
0.2829
0.2341
0.1891
0.2906
0.2681
0.3876
0.3915
0.1844
0.1889
0.4405
0.1405
0.3460
0.2724
0.2567
0.2785
0.1148
0.1607
0.1754
0.0883
0.1649
0.1268
0.2356
0.2811
0.0766
0.1424
0.1683
0.3979
0.2685
0.6383
0.1087
0.3180
0.1760
0.3634
0.2615
0.1999
0.2541
[torch.FloatTensor of size 256]
), (u'layer3.1.bn2.bias', Parameter containing:
-0.0162
-0.2033
0.0294
-0.1697
-0.1840
-0.0309
-0.2039
-0.1426
-0.0443
-0.0886
-0.0647
-0.0968
-0.0380
-0.2073
-0.3061
0.1443
-0.3079
-0.1232
-0.1627
-0.0980
-0.2471
-0.2837
-0.1201
-0.2893
-0.2303
-0.3562
-0.0825
-0.3483
0.0707
-0.1321
-0.1074
-0.1451
0.0235
0.0225
-0.1885
-0.2507
-0.2461
0.0631
-0.0023
-0.1209
-0.2581
-0.1640
-0.0172
-0.1143
-0.2096
-0.0158
0.0128
-0.1332
-0.3139
-0.2294
-0.1527
-0.3503
0.2086
0.0785
-0.1597
-0.1990
0.0346
0.0388
-0.1269
0.1019
0.0981
-0.0390
-0.2537
-0.1356
-0.1796
-0.2422
-0.4517
-0.3124
-0.0177
-0.2615
-0.1567
0.0212
-0.0753
-0.1426
-0.2788
0.0062
-0.1895
-0.2327
-0.1298
-0.1200
-0.1917
-0.0987
-0.1916
-0.1666
-0.2729
0.1287
-0.4620
-0.2259
-0.2270
0.1939
0.0230
-0.3303
-0.3202
-0.1292
-0.0716
0.0048
-0.2579
-0.0116
-0.0557
-0.1229
-0.4804
-0.2351
-0.1367
-0.0578
-0.0537
-0.2743
-0.0827
-0.1922
-0.3481
-0.0358
-0.1094
0.0138
-0.1888
-0.2592
-0.3293
-0.0820
-0.1839
-0.1636
-0.3163
-0.0246
-0.1667
-0.1653
-0.3076
-0.2229
-0.1834
-0.0536
-0.0621
-0.1752
-0.5243
-0.1933
-0.1119
-0.2283
-0.0437
-0.1777
-0.1300
-0.2519
-0.0456
-0.6305
-0.1364
-0.2138
0.0406
-0.5287
-0.2014
-0.1442
-0.1930
-0.3033
0.1030
-0.1499
-0.2297
-0.5301
-0.2543
-0.0417
0.0429
-0.3218
-0.1611
-0.2562
-0.1187
-0.1001
0.0225
0.0996
-0.2138
-0.2019
0.0808
-0.0121
-0.2364
-0.3247
-0.1482
-0.4846
-0.3449
-0.1365
-0.6664
0.0418
-0.2807
-0.0961
-0.2378
-0.1834
-0.1890
-0.0377
-0.3056
-0.1843
-0.1357
-0.3038
-0.2680
-0.4143
-0.2633
-0.1750
-0.1856
-0.2405
-0.1082
-0.2250
-0.1268
-0.1094
0.0594
-0.1419
-0.1178
-0.1602
-0.0328
-0.0194
-0.1985
0.0470
-0.1887
-0.2776
-0.0930
-0.4092
-0.3378
-0.7252
0.0260
-0.1829
0.0561
-0.2227
-0.0026
-0.3218
-0.0093
-0.2843
-0.5121
-0.2337
-0.0836
-0.0818
-0.1296
-0.2090
0.0169
-0.1899
-0.1892
-0.3075
-0.3108
-0.2986
-0.4712
-0.1823
-0.1893
-0.3131
-0.0876
-0.1166
-0.2995
-0.0831
-0.3427
-0.0772
-0.1460
-0.1611
0.0203
-0.0627
-0.0610
-0.2574
-0.1383
0.0470
-0.0302
-0.1638
-0.3323
-0.1741
-0.6307
-0.0772
-0.2123
-0.1559
-0.0459
-0.2416
-0.0143
-0.2079
[torch.FloatTensor of size 256]
), (u'layer4.0.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
-1.1645e-02 -1.9010e-02 -2.1876e-02
2.0482e-02 2.3962e-02 2.9161e-02
4.3672e-02 3.3278e-02 4.9908e-02
( 0 , 1 ,.,.) =
-7.4040e-03 2.8083e-03 -4.7339e-03
6.9030e-03 1.4271e-02 -3.6954e-03
-3.1341e-03 1.3736e-02 1.6127e-03
( 0 , 2 ,.,.) =
1.8676e-02 -1.0553e-02 -1.4233e-02
8.9944e-03 -2.5068e-03 -1.2145e-02
-4.9455e-03 -2.9206e-02 -9.6385e-03
...
( 0 ,253,.,.) =
-1.2655e-02 1.7691e-02 9.8264e-04
7.4271e-03 7.6115e-03 1.1135e-02
2.3242e-02 1.1058e-02 4.0498e-03
( 0 ,254,.,.) =
1.8557e-02 1.2472e-02 1.7220e-02
-4.8544e-03 8.3627e-03 2.2811e-02
-5.1675e-03 2.3264e-02 3.4068e-02
( 0 ,255,.,.) =
2.4934e-02 2.2373e-02 4.2614e-02
1.3486e-02 1.6760e-03 1.3019e-02
-6.2821e-03 -1.5112e-03 -8.9229e-03
⋮
( 1 , 0 ,.,.) =
-9.8089e-04 -6.3011e-03 5.9932e-03
1.5936e-02 1.3394e-02 2.9934e-02
2.3149e-02 2.0709e-02 2.5485e-02
( 1 , 1 ,.,.) =
-2.0015e-02 -3.3349e-02 -8.0396e-03
-7.2800e-03 -1.2187e-02 -2.0389e-04
-1.3138e-02 -2.0427e-02 -1.6286e-02
( 1 , 2 ,.,.) =
-6.7681e-03 5.0045e-03 -2.6683e-03
-2.1073e-02 2.8275e-04 -1.8205e-02
-1.7382e-02 -5.0244e-03 -3.0386e-03
...
( 1 ,253,.,.) =
-1.1035e-02 -2.2964e-02 -1.1028e-02
-6.3256e-03 -4.1667e-03 -1.7323e-02
-1.3611e-02 -2.3468e-02 -1.6436e-02
( 1 ,254,.,.) =
7.3663e-03 6.6219e-03 5.2776e-03
-3.5464e-03 3.2750e-03 -9.1126e-03
3.5593e-04 -1.0151e-02 -1.9123e-02
( 1 ,255,.,.) =
1.8193e-03 8.8087e-03 5.1361e-03
3.1915e-03 2.5287e-02 2.4939e-02
1.3968e-02 1.9613e-02 2.2382e-02
⋮
( 2 , 0 ,.,.) =
-4.1548e-03 8.8964e-03 2.0143e-03
1.1327e-02 1.3251e-02 1.4014e-02
7.2196e-03 1.3045e-02 2.4827e-02
( 2 , 1 ,.,.) =
-1.5025e-02 5.0530e-03 7.4766e-03
-2.4685e-02 -1.6732e-02 -1.0888e-02
-2.8064e-02 -1.1875e-02 -3.4120e-03
( 2 , 2 ,.,.) =
2.8449e-02 1.4594e-02 6.9441e-03
2.4799e-02 1.9453e-02 1.1294e-02
-1.0787e-02 -2.1006e-02 -1.0372e-02
...
( 2 ,253,.,.) =
1.4967e-02 8.2449e-03 2.0244e-03
1.4287e-02 -6.3867e-03 -8.0757e-03
2.7547e-02 1.0791e-02 1.6567e-02
( 2 ,254,.,.) =
3.6191e-02 3.8918e-02 3.9028e-02
-8.3489e-04 1.3273e-02 2.0172e-02
-2.0652e-02 -5.4010e-03 1.7147e-03
( 2 ,255,.,.) =
2.0373e-04 3.5919e-03 8.5592e-03
6.2363e-03 -9.3086e-05 1.2940e-02
1.3152e-02 1.0732e-02 1.9896e-02
...
⋮
(509, 0 ,.,.) =
-1.7400e-02 -6.7019e-03 -9.1787e-03
-9.9672e-03 2.6298e-04 3.3439e-03
1.5721e-02 1.4216e-02 2.0509e-02
(509, 1 ,.,.) =
2.1410e-02 3.6914e-02 2.8239e-02
3.8158e-02 4.8944e-02 3.4652e-02
3.1723e-02 4.4208e-02 4.0035e-02
(509, 2 ,.,.) =
-3.3437e-03 -1.0482e-02 -5.3990e-03
-5.3186e-03 1.1394e-02 1.7593e-03
-5.6652e-03 -6.6373e-03 -1.3492e-02
...
(509,253,.,.) =
-1.7099e-02 -1.8145e-03 -1.3040e-02
-2.2750e-02 -3.6062e-03 -8.0294e-03
-1.6087e-02 -1.0175e-02 -1.3529e-02
(509,254,.,.) =
4.1701e-04 -5.1785e-03 -2.1884e-02
2.6919e-03 8.9139e-03 -1.4217e-04
-7.3746e-03 -6.6853e-03 -2.3725e-02
(509,255,.,.) =
1.9425e-02 1.3175e-02 1.7511e-02
1.8235e-02 4.4286e-02 2.3767e-02
2.6504e-02 3.3104e-02 1.9696e-02
⋮
(510, 0 ,.,.) =
-1.0177e-02 -1.0701e-02 -2.0428e-02
-1.7986e-02 5.9928e-03 -1.0584e-03
-1.8794e-02 -1.8773e-03 -6.9449e-03
(510, 1 ,.,.) =
-2.8498e-03 1.6427e-03 1.4575e-04
-5.4403e-03 8.3667e-03 -9.4164e-03
-4.4999e-03 5.4902e-03 2.4863e-03
(510, 2 ,.,.) =
-1.3356e-02 -2.1525e-02 5.3421e-04
-1.9160e-02 -2.4645e-02 -1.3791e-02
-6.1991e-03 -1.3174e-02 -3.6783e-03
...
(510,253,.,.) =
-3.3993e-03 -2.7823e-03 7.6715e-03
-2.0649e-02 -1.2731e-02 -9.4138e-03
-1.3678e-03 -3.4410e-02 -2.6984e-02
(510,254,.,.) =
-3.5651e-04 2.0102e-03 1.4130e-02
-1.3073e-02 -1.6616e-02 -1.2690e-02
-3.5934e-02 -4.1700e-02 -3.3968e-02
(510,255,.,.) =
2.0470e-02 8.0159e-04 -1.1607e-03
9.5101e-03 3.0336e-02 2.7362e-02
1.5588e-02 3.2851e-02 1.3015e-02
⋮
(511, 0 ,.,.) =
-1.5574e-02 -3.2971e-02 -3.1939e-02
-2.2502e-02 -5.7187e-03 -5.6729e-03
-2.7309e-02 -1.6981e-02 1.2832e-04
(511, 1 ,.,.) =
-1.1925e-02 -2.9479e-02 -2.0437e-02
-2.4408e-02 -2.2069e-02 -1.9965e-03
-2.3279e-02 -5.5140e-03 2.5630e-02
(511, 2 ,.,.) =
-1.6100e-02 -8.2417e-03 1.5266e-04
-2.6195e-03 -8.2754e-03 -2.9435e-02
-2.7493e-03 -2.4889e-02 -2.3583e-02
...
(511,253,.,.) =
1.7985e-02 1.8594e-02 8.9198e-04
-1.7319e-02 7.8735e-03 -2.8659e-03
3.8596e-03 2.9061e-02 2.4188e-02
(511,254,.,.) =
-2.6735e-02 -1.4391e-02 -4.0148e-02
-2.6728e-02 -2.4455e-02 -6.9176e-03
-5.7244e-02 -2.1995e-04 5.5438e-02
(511,255,.,.) =
2.3487e-02 2.7157e-03 -8.4719e-04
1.7886e-02 5.4860e-03 2.8059e-02
4.6468e-03 1.8598e-02 1.3761e-03
[torch.FloatTensor of size 512x256x3x3]
), (u'layer4.0.bn1.running_mean',
-0.1124
-0.1164
-0.1293
-0.4187
-0.3841
-0.4075
-0.5318
-0.1824
-0.7574
-0.8394
-0.1911
-0.2697
-0.4389
-0.2669
-0.4330
-0.4768
-0.4965
-0.4738
-0.1415
-0.4245
-0.3285
-0.5264
-0.8126
-0.4011
-0.3142
-0.4584
-0.1663
-0.4856
-0.3431
-0.5183
-0.4328
-0.6726
-0.4332
-0.4050
-0.1831
-0.4535
-1.0135
-0.0091
-0.4773
-0.3841
-0.5013
-0.7427
-0.4992
-0.5870
-0.3608
-0.4751
-0.6086
-0.3768
-0.6925
-1.2176
-0.5161
-0.4123
-0.3214
-0.1537
-0.3330
-0.3304
-0.4375
-0.5819
-0.4770
-0.5944
-0.2509
0.2625
0.1042
-0.3361
-0.4330
-0.4002
-0.3678
-0.3667
-0.2196
-0.6465
-0.5887
-0.3854
-0.3306
-0.3786
-0.2318
-0.0974
-1.0318
-0.8801
-0.3272
-0.4941
-0.6038
-0.4083
-0.1259
-0.1156
-0.1786
-0.5553
-0.7105
-0.2667
-0.1680
-0.0074
-0.2463
-0.3361
0.1572
-0.6019
-0.4686
-0.3578
-0.5812
-0.2113
-0.3591
-0.5293
-0.7721
-0.5846
-0.1129
-0.4135
-0.4965
-0.6759
-0.4077
-0.4894
-0.3329
-0.3689
-0.0139
-0.1107
-0.3289
0.0494
-0.1049
0.0325
-0.2145
-0.0585
-0.3660
-0.2958
-0.0878
-0.6473
-0.8958
-0.5207
-0.4756
-0.3351
-0.5421
0.0924
-0.9209
0.0610
0.0737
-0.3680
-0.7011
-0.5918
-0.5081
-0.4591
-0.5154
-0.3190
-0.6232
-0.5512
-0.4814
-0.4307
-0.2918
-0.2123
-0.2189
-0.4028
-0.1570
-0.1099
-0.3914
-0.3886
-0.1502
-0.4527
-0.1671
-0.2021
-0.5321
-0.2644
-0.5207
-0.5534
-0.5519
-0.3069
-0.2326
-0.5709
-0.6164
-0.0115
-0.6641
-0.5729
-0.2750
-0.5720
-0.7684
-0.4361
-0.3526
-0.0426
-0.1350
-0.8835
-0.3217
-0.1706
-0.4284
-0.4497
-0.7264
-0.9589
-0.3439
-0.6800
-0.4520
-0.5459
-0.2993
-0.4854
-0.1415
-0.0290
-0.2729
-0.1666
-0.2346
-0.5397
-0.4724
-0.5606
-0.5647
-0.3614
-0.5158
-0.2728
-0.0432
-0.9420
-0.5284
-0.6236
-0.3835
-0.6825
-0.5347
-0.4121
-0.2656
-0.5761
-0.3379
-0.7679
-0.8335
-0.5631
-0.3712
-0.0170
-0.5099
-0.5196
-0.2617
-0.5632
-0.6310
-0.5244
-0.2192
-0.4241
-0.2130
-0.2760
-0.1772
-0.5719
-0.4033
-0.7874
-0.3226
-0.2671
-0.3425
-0.7110
-0.4422
-0.1318
-0.3841
-0.4050
-0.5102
-0.4865
-0.5415
-0.4790
-0.4867
-0.2482
-0.5347
0.1373
-0.9281
-0.3791
-0.0393
-0.6500
-0.0687
-0.2550
-0.7833
-0.1906
0.0692
-0.5203
0.1102
-0.4691
-0.2165
-0.4058
-0.5252
-0.5489
-0.2243
-0.8912
-0.5753
-0.3787
-0.4660
-0.4167
-0.7948
-0.2214
0.2169
-0.3230
-0.5716
-0.4523
-0.2235
-0.5354
-0.6187
-0.6403
-0.5779
-0.6974
-0.4531
-0.4559
-0.6742
-0.8658
-0.6413
-0.3098
0.4122
-0.4813
-0.5268
0.1341
-0.1123
-0.3868
-0.6683
-0.4020
-0.4705
-0.5263
-0.4912
-0.4345
0.0675
-0.7317
-0.3467
-0.4757
-0.4845
-0.1666
-0.5546
-0.2875
-0.5574
-0.2929
-0.9178
0.0932
-0.3473
-0.2659
-0.8700
-0.4143
-0.6691
-0.3896
-0.3993
-0.3583
-0.9644
-0.5416
-0.3117
0.1785
-0.4971
-0.8436
-0.6282
-0.5113
-0.0999
-0.3834
-0.4330
-0.4084
-0.4269
-0.5670
-0.5599
0.2002
-0.3582
-0.7621
-0.4257
-0.4749
-0.2672
-0.4449
-0.4631
-0.5055
-0.3216
-0.5426
-0.2615
-0.5695
-0.2981
-0.8440
-0.6237
-0.6642
-0.4691
-0.9326
-0.6129
0.0988
-0.8381
-0.2735
-0.2299
-0.5881
-0.2101
-0.0520
-0.8218
-0.8467
-0.1617
-0.2244
-0.4366
-0.1205
-0.5751
-0.6796
-0.4050
-0.0679
-0.8405
-0.4547
-0.1708
-0.0480
-0.1587
-0.3734
-0.8896
0.0825
-0.7593
-0.4594
-0.2676
-0.1145
-0.3023
-0.2456
-0.3645
-0.3545
-0.8241
-0.1730
-0.2575
-0.0103
-0.3935
-0.7034
-0.2919
-0.2793
-0.3966
-0.7128
-0.5211
-0.7188
-0.4073
-0.2814
-0.2293
-0.4529
-0.6779
-0.0934
-0.3272
-0.3638
-0.2048
-0.2720
-0.3683
-0.3334
-0.6409
-0.2807
-0.4246
-0.0683
-0.3437
-0.1466
-0.6256
-0.4319
-0.1858
-0.1817
-0.7679
-0.3353
-0.9588
-0.1952
-0.2006
-0.1280
-0.4047
-0.2239
-0.6247
-0.3422
-0.5595
-0.7026
0.0354
-0.5814
-0.7382
-0.3904
-0.3409
-0.8630
-0.3453
-0.1569
-0.6717
-0.5216
-0.2329
-0.3564
-0.7584
-0.0124
-0.5398
-0.7708
-0.3359
-0.2909
-0.3133
-0.3400
-0.5624
-0.5493
-0.4637
-0.4022
-0.3580
-0.3325
-0.3384
-0.2277
-0.2697
-0.2907
-0.2164
-0.2489
-0.0573
-0.3735
-0.3996
-0.3451
-0.4648
-0.7143
-0.2062
-0.2513
-0.4464
-0.2624
-0.1615
-0.3099
-0.7480
-0.7751
-0.3383
-0.2875
-0.5976
-0.3752
-0.9447
0.9778
-0.5259
-0.0119
-0.3122
-0.3802
-0.7690
-0.3534
-0.3268
-0.3882
-0.4871
-0.4404
-0.6773
-0.4915
-0.4891
-0.3313
-0.6497
-0.5303
-0.6193
-0.8063
-0.4356
-0.0466
-0.6772
-0.7360
-0.6388
-0.4199
-0.4575
-0.5776
-0.5648
-0.2510
-0.2753
-0.4860
[torch.FloatTensor of size 512]
), (u'layer4.0.bn1.running_var',
0.1030
0.0999
0.1279
0.0991
0.1004
0.1214
0.1621
0.1000
0.1600
0.1227
0.1053
0.1421
0.1020
0.1322
0.1003
0.1064
0.1366
0.1246
0.1300
0.1984
0.1163
0.1496
0.1630
0.1266
0.1066
0.1212
0.1068
0.1123
0.1615
0.1656
0.1303
0.1438
0.1148
0.1237
0.1166
0.1157
0.1930
0.1042
0.0904
0.0956
0.1063
0.0841
0.1163
0.1349
0.1176
0.1648
0.1115
0.1222
0.1269
0.1880
0.1090
0.1280
0.1211
0.1447
0.0857
0.1207
0.1129
0.1204
0.1305
0.1179
0.1167
0.2456
0.2022
0.0975
0.1263
0.1045
0.2082
0.0831
0.1038
0.1231
0.1124
0.1105
0.1467
0.1336
0.1288
0.1107
0.1366
0.1815
0.1282
0.1306
0.1200
0.1056
0.1189
0.1204
0.1113
0.0986
0.1033
0.1220
0.1178
0.1497
0.1115
0.1130
0.1207
0.1372
0.0990
0.1461
0.0985
0.1184
0.1507
0.1284
0.1085
0.1406
0.1754
0.1095
0.0977
0.0941
0.1799
0.1073
0.0796
0.1153
0.1189
0.1452
0.1099
0.1361
0.2592
0.0810
0.1098
0.1573
0.1272
0.1837
0.1360
0.0959
0.1403
0.1303
0.1342
0.0965
0.1285
0.2215
0.1247
0.1188
0.1204
0.1163
0.1006
0.1711
0.1036
0.1211
0.1361
0.1193
0.0970
0.1104
0.1331
0.1250
0.1168
0.0885
0.1163
0.0893
0.1681
0.0934
0.1244
0.1188
0.1640
0.1081
0.1310
0.1239
0.1269
0.0972
0.1211
0.1043
0.2294
0.1208
0.1018
0.1148
0.1472
0.0975
0.1309
0.1317
0.1914
0.1081
0.1491
0.1132
0.0993
0.1167
0.1473
0.1204
0.1012
0.1201
0.1062
0.1190
0.1336
0.1204
0.1411
0.1816
0.1134
0.1114
0.1735
0.1775
0.1215
0.1389
0.0983
0.1396
0.1208
0.1063
0.1136
0.1248
0.2639
0.1485
0.1077
0.1061
0.1551
0.1727
0.1251
0.1360
0.1216
0.1526
0.1103
0.1045
0.0857
0.1100
0.1190
0.1255
0.1585
0.1270
0.1328
0.1256
0.1079
0.1283
0.0715
0.1163
0.1375
0.0821
0.1461
0.1210
0.1227
0.1141
0.1072
0.1492
0.1203
0.2086
0.1131
0.0747
0.1385
0.1496
0.1287
0.0992
0.1335
0.1515
0.1861
0.1131
0.1649
0.0937
0.1362
0.0959
0.0870
0.1182
0.1449
0.1577
0.1250
0.1298
0.1252
0.1016
0.1787
0.1136
0.1204
0.1126
0.1295
0.1371
0.1136
0.1469
0.1608
0.0994
0.2343
0.1397
0.1145
0.1336
0.1430
0.1121
0.1389
0.1633
0.1050
0.1538
0.1095
0.1131
0.1005
0.1411
0.1132
0.1021
0.1399
0.1045
0.1220
0.1303
0.1541
0.1425
0.1217
0.0941
0.2309
0.0997
0.0928
0.1263
0.1255
0.0886
0.1247
0.1293
0.1076
0.1414
0.0935
0.1377
0.1662
0.1451
0.1051
0.1116
0.1321
0.1895
0.1370
0.1350
0.1285
0.0951
0.1843
0.2134
0.1534
0.1752
0.1721
0.0990
0.1510
0.1052
0.1361
0.1463
0.1610
0.1596
0.1157
0.0883
0.1122
0.1188
0.1181
0.1356
0.1305
0.1074
0.1093
0.1180
0.0961
0.2218
0.1376
0.1151
0.1370
0.1296
0.1005
0.0916
0.1126
0.0927
0.1432
0.0977
0.1235
0.1426
0.1382
0.1386
0.1177
0.1315
0.1288
0.1192
0.1377
0.1368
0.1468
0.1088
0.1311
0.1216
0.1078
0.1238
0.1370
0.1052
0.1886
0.1694
0.1326
0.1314
0.1080
0.1096
0.1218
0.1724
0.1309
0.0959
0.1403
0.1235
0.0953
0.1568
0.1517
0.1197
0.2459
0.1258
0.0967
0.0933
0.0624
0.1311
0.0904
0.1298
0.1141
0.1106
0.1079
0.1113
0.1231
0.1412
0.1160
0.1561
0.2678
0.1610
0.1171
0.1981
0.1232
0.1209
0.1024
0.1028
0.1370
0.0792
0.1250
0.1407
0.1429
0.1258
0.1135
0.1144
0.1183
0.1113
0.1903
0.1068
0.1368
0.1185
0.1132
0.1182
0.1056
0.0920
0.1041
0.1524
0.1525
0.1355
0.2332
0.1436
0.1497
0.1969
0.1629
0.1681
0.1129
0.1383
0.1483
0.1190
0.1353
0.1182
0.1201
0.1007
0.1146
0.1776
0.1119
0.1096
0.1113
0.1581
0.0983
0.1244
0.1458
0.1137
0.1371
0.1809
0.2130
0.1372
0.0939
0.1047
0.1311
0.1918
0.1234
0.0939
0.1210
0.1386
0.0903
0.1178
0.1601
0.1571
0.1462
0.1532
0.2961
0.0971
0.1383
0.1163
0.0939
0.1179
0.1066
0.1472
0.1305
0.1430
0.1198
0.1306
0.1825
0.1339
0.1046
0.0709
0.1584
0.1002
0.1495
0.1604
0.1422
0.1146
0.0903
0.0900
0.1229
0.1567
0.1262
0.1163
0.1507
0.1026
0.1223
0.1404
0.1365
0.1182
0.0923
0.1189
0.1092
0.1069
0.1234
0.2338
0.1229
0.1110
0.0989
0.1133
0.0932
0.1583
0.1236
0.1395
0.1109
0.1057
[torch.FloatTensor of size 512]
), (u'layer4.0.bn1.weight', Parameter containing:
0.2427
0.2232
0.2511
0.2288
0.2074
0.2905
0.2482
0.3102
0.2749
0.2892
0.2448
0.1759
0.2426
0.2780
0.2315
0.2631
0.3383
0.2785
0.2536
0.2989
0.2335
0.2812
0.3486
0.2778
0.2280
0.2547
0.3032
0.2468
0.2512
0.2973
0.2577
0.3200
0.2385
0.2714
0.2532
0.2625
0.3344
0.2626
0.1838
0.2839
0.2187
0.2666
0.2858
0.2471
0.2915
0.2332
0.2637
0.2691
0.2432
0.2384
0.2356
0.2525
0.2564
0.2451
0.2529
0.2522
0.2800
0.3165
0.2340
0.2634
0.2569
0.1942
0.2621
0.2205
0.2301
0.2323
0.2811
0.1897
0.2280
0.3472
0.2717
0.3191
0.2440
0.2719
0.2781
0.2262
0.3444
0.2648
0.2725
0.2851
0.2039
0.2935
0.2742
0.2774
0.2654
0.2430
0.2721
0.2708
0.3085
0.2895
0.2596
0.2147
0.3119
0.3449
0.2262
0.2814
0.2326
0.2712
0.2637
0.2323
0.3333
0.2714
0.2991
0.2747
0.2515
0.2394
0.2709
0.2836
0.2866
0.2408
0.2560
0.2048
0.2394
0.2813
0.3267
0.2761
0.2123
0.2715
0.2540
0.2771
0.3209
0.1905
0.3989
0.2676
0.2357
0.2169
0.3216
0.3596
0.2838
0.2648
0.2702
0.2469
0.2442
0.2553
0.2599
0.2693
0.2399
0.2700
0.2063
0.2711
0.2834
0.2781
0.2529
0.2013
0.2343
0.2082
0.3063
0.1635
0.2673
0.2197
0.2787
0.2724
0.2744
0.2287
0.2969
0.2662
0.2982
0.2396
0.3039
0.2319
0.2773
0.2661
0.2898
0.2489
0.3060
0.2612
0.2937
0.3045
0.2999
0.2580
0.2093
0.2714
0.2993
0.2679
0.2963
0.2754
0.2580
0.2566
0.2634
0.2325
0.2442
0.2934
0.2398
0.2631
0.2851
0.2870
0.2239
0.2410
0.2676
0.2681
0.2638
0.2732
0.2812
0.2203
0.2670
0.2764
0.2550
0.3160
0.2888
0.2615
0.2178
0.2485
0.2414
0.2798
0.2872
0.2767
0.2551
0.2429
0.2459
0.3288
0.3024
0.2912
0.2625
0.3019
0.2643
0.2721
0.2108
0.2368
0.2269
0.1988
0.2830
0.2569
0.2349
0.2755
0.2442
0.2717
0.2747
0.2785
0.2516
0.2227
0.2783
0.2465
0.2652
0.2641
0.2960
0.2671
0.2679
0.2537
0.2847
0.2507
0.2525
0.2024
0.2311
0.2618
0.2764
0.3031
0.2452
0.2716
0.2273
0.2295
0.2611
0.2329
0.2690
0.2753
0.2737
0.2590
0.2421
0.2685
0.3392
0.3073
0.1371
0.3650
0.2980
0.2460
0.2487
0.2912
0.2704
0.2560
0.2213
0.2569
0.2661
0.2367
0.2742
0.2847
0.3055
0.2671
0.2819
0.2791
0.2401
0.2549
0.2210
0.3507
0.2852
0.2162
0.2821
0.2369
0.2905
0.2826
0.2300
0.2745
0.2437
0.2522
0.2489
0.2395
0.2851
0.2887
0.2621
0.2500
0.2689
0.2427
0.3010
0.3067
0.2861
0.2387
0.2462
0.2859
0.2550
0.2630
0.2442
0.2145
0.2898
0.2282
0.2327
0.2242
0.2738
0.2485
0.2379
0.3058
0.2798
0.2761
0.2252
0.2866
0.2660
0.3250
0.2612
0.2767
0.3205
0.2932
0.3183
0.2939
0.3103
0.2553
0.2981
0.3667
0.3086
0.2254
0.2352
0.2348
0.2555
0.2597
0.2369
0.3017
0.2776
0.2728
0.3174
0.2785
0.2721
0.2637
0.2702
0.3633
0.2869
0.2675
0.3405
0.2587
0.2732
0.2747
0.2821
0.2750
0.2630
0.2018
0.2358
0.3034
0.3155
0.3013
0.2775
0.2511
0.2945
0.1605
0.2825
0.2964
0.2194
0.2061
0.2332
0.2348
0.2663
0.2543
0.2927
0.2215
0.2521
0.2827
0.1993
0.2453
0.2597
0.2654
0.2757
0.2650
0.2444
0.2949
0.2308
0.3071
0.1904
0.3024
0.2786
0.3659
0.2966
0.2746
0.2449
0.2201
0.2564
0.2853
0.2392
0.2457
0.2467
0.2374
0.2664
0.2460
0.3182
0.1793
0.2379
0.2596
0.2847
0.2452
0.1974
0.2388
0.2949
0.2879
0.2786
0.2765
0.3296
0.2530
0.2690
0.2547
0.2333
0.2348
0.2690
0.2718
0.2679
0.2516
0.2710
0.2366
0.2601
0.2764
0.2880
0.2008
0.2637
0.2263
0.2511
0.2604
0.2805
0.2989
0.2965
0.2597
0.2767
0.2553
0.2959
0.2512
0.2925
0.3008
0.2423
0.2394
0.2708
0.3704
0.2879
0.2532
0.2248
0.2023
0.2279
0.2366
0.3082
0.2980
0.2909
0.2777
0.4293
0.2658
0.2940
0.2418
0.2816
0.3247
0.2647
0.2216
0.2758
0.2421
0.2078
0.2332
0.2271
0.2611
0.3650
0.2017
0.2598
0.2160
0.2641
0.1408
0.2664
0.2502
0.2553
0.2227
0.2417
0.2696
0.2388
0.2833
0.2333
0.2667
0.2224
0.2691
0.2710
0.2459
0.2674
0.2430
0.2593
0.1851
0.2950
0.3664
0.2212
0.3026
0.1840
0.3443
0.2140
0.3717
0.2360
0.3081
0.2638
0.2233
[torch.FloatTensor of size 512]
), (u'layer4.0.bn1.bias', Parameter containing:
-0.1986
-0.1593
-0.2054
-0.1598
-0.1268
-0.3226
-0.1597
-0.3477
-0.2497
-0.2730
-0.2319
-0.0286
-0.1899
-0.2813
-0.1733
-0.2412
-0.3712
-0.2747
-0.2053
-0.2585
-0.1535
-0.2748
-0.3241
-0.2525
-0.1906
-0.2252
-0.3436
-0.2202
-0.1664
-0.2716
-0.1920
-0.3399
-0.2026
-0.2972
-0.2616
-0.2238
-0.2486
-0.2606
-0.0893
-0.3572
-0.1283
-0.2583
-0.2450
-0.1523
-0.3165
-0.1445
-0.2522
-0.1963
-0.1794
-0.1071
-0.1662
-0.2053
-0.2530
-0.1447
-0.2517
-0.2062
-0.2817
-0.3376
-0.1382
-0.2389
-0.2557
-0.0156
-0.2169
-0.1763
-0.1486
-0.2122
-0.2002
-0.0716
-0.2089
-0.3580
-0.2588
-0.3599
-0.1528
-0.2107
-0.2925
-0.1855
-0.3970
-0.1257
-0.2574
-0.2412
-0.0863
-0.3065
-0.2701
-0.3380
-0.2485
-0.1935
-0.2987
-0.2279
-0.3600
-0.2764
-0.2480
-0.1208
-0.3378
-0.2661
-0.1677
-0.2470
-0.2152
-0.2591
-0.1936
-0.1543
-0.4117
-0.1570
-0.2372
-0.2997
-0.2124
-0.2034
-0.1848
-0.3070
-0.3438
-0.1839
-0.1937
-0.0916
-0.2338
-0.3558
-0.1967
-0.3303
-0.1398
-0.2177
-0.1665
-0.1857
-0.3115
-0.1049
-0.4229
-0.2408
-0.1320
-0.1631
-0.3378
-0.3300
-0.3183
-0.2268
-0.2787
-0.1950
-0.1950
-0.1463
-0.2437
-0.2297
-0.1282
-0.2164
-0.1179
-0.2437
-0.2611
-0.2656
-0.1948
-0.1208
-0.1668
-0.1351
-0.2713
-0.0560
-0.2243
-0.1318
-0.2356
-0.2720
-0.2051
-0.1736
-0.2891
-0.2627
-0.3358
-0.1779
-0.2309
-0.1477
-0.2685
-0.1882
-0.2629
-0.1983
-0.3522
-0.1905
-0.2778
-0.3395
-0.2895
-0.2240
-0.1150
-0.2462
-0.2426
-0.2581
-0.3133
-0.2315
-0.2271
-0.2077
-0.2109
-0.1371
-0.1323
-0.2529
-0.1716
-0.2532
-0.2277
-0.2084
-0.1803
-0.1868
-0.2404
-0.2166
-0.2197
-0.2870
-0.3062
-0.1507
-0.1054
-0.2199
-0.2415
-0.3310
-0.2700
-0.1568
-0.1449
-0.2610
-0.1828
-0.2648
-0.3134
-0.2937
-0.2687
-0.2115
-0.2164
-0.4522
-0.2999
-0.3032
-0.2292
-0.3099
-0.2642
-0.2695
-0.1441
-0.1671
-0.1570
-0.1415
-0.2222
-0.1736
-0.1481
-0.2573
-0.2060
-0.1703
-0.2360
-0.1770
-0.2132
-0.2016
-0.3001
-0.1518
-0.2086
-0.2805
-0.2698
-0.2292
-0.1293
-0.2514
-0.2600
-0.2454
-0.1744
-0.1029
-0.1679
-0.2353
-0.2007
-0.3363
-0.1640
-0.2430
-0.1699
-0.1697
-0.1837
-0.1625
-0.2415
-0.2687
-0.2305
-0.2029
-0.2209
-0.2240
-0.2675
-0.3233
0.1462
-0.4777
-0.2376
-0.1489
-0.1462
-0.3055
-0.2234
-0.1697
-0.1952
-0.2131
-0.2340
-0.2039
-0.3054
-0.2596
-0.3470
-0.2176
-0.2706
-0.2897
-0.1729
-0.2300
-0.1066
-0.3556
-0.2912
-0.1777
-0.2007
-0.1699
-0.3009
-0.3046
-0.1693
-0.2602
-0.2053
-0.1810
-0.1808
-0.1730
-0.3757
-0.1808
-0.1805
-0.1895
-0.2643
-0.2075
-0.2365
-0.1975
-0.3064
-0.1984
-0.1811
-0.3676
-0.1198
-0.1485
-0.1770
-0.0781
-0.2052
-0.1360
-0.1417
-0.1691
-0.2395
-0.1785
-0.1747
-0.2484
-0.2717
-0.3096
-0.1465
-0.2239
-0.2584
-0.3572
-0.2311
-0.2878
-0.3841
-0.3475
-0.3896
-0.1891
-0.2861
-0.2431
-0.2837
-0.4365
-0.3353
-0.1802
-0.1976
-0.1529
-0.1978
-0.2535
-0.1954
-0.2667
-0.2813
-0.2487
-0.3070
-0.2339
-0.2212
-0.1925
-0.2224
-0.4178
-0.3151
-0.2663
-0.3581
-0.1935
-0.2385
-0.2424
-0.1850
-0.2265
-0.1803
-0.0777
-0.1492
-0.3361
-0.4133
-0.3123
-0.2745
-0.1247
-0.3102
0.0041
-0.1981
-0.3301
-0.2047
-0.1053
-0.1653
-0.1634
-0.1116
-0.2314
-0.3191
-0.1818
-0.2657
-0.2220
-0.1029
-0.1999
-0.2702
-0.2139
-0.2256
-0.2653
-0.1630
-0.3322
-0.1617
-0.3446
0.0288
-0.2456
-0.3171
-0.3580
-0.2857
-0.2520
-0.2031
-0.1522
-0.2203
-0.3490
-0.1685
-0.1424
-0.1602
-0.1553
-0.3057
-0.2420
-0.3536
-0.0551
-0.0987
-0.2272
-0.2619
-0.2035
-0.0906
-0.1976
-0.3040
-0.2732
-0.3161
-0.2102
-0.3384
-0.1740
-0.1475
-0.1842
-0.1823
-0.1151
-0.2183
-0.2010
-0.2659
-0.2205
-0.2567
-0.1633
-0.2213
-0.2658
-0.2938
-0.1069
-0.2522
-0.1103
-0.2216
-0.2244
-0.2908
-0.2176
-0.3605
-0.2374
-0.2391
-0.2251
-0.2256
-0.1339
-0.1970
-0.2970
-0.2206
-0.2051
-0.2229
-0.3602
-0.2923
-0.2498
-0.1466
-0.0979
-0.1686
-0.2158
-0.2881
-0.3002
-0.2760
-0.2496
-0.3536
-0.2868
-0.3251
-0.1847
-0.3062
-0.3861
-0.2650
-0.1339
-0.1846
-0.1630
-0.0630
-0.1717
-0.1415
-0.1906
-0.4611
-0.1391
-0.1920
-0.1369
-0.1647
-0.0055
-0.2598
-0.2653
-0.2319
-0.1780
-0.1913
-0.2055
-0.1891
-0.2625
-0.1633
-0.2497
-0.1696
-0.1907
-0.2431
-0.1825
-0.2607
-0.1943
-0.2361
-0.0581
-0.2758
-0.2593
-0.1466
-0.3589
-0.0439
-0.3440
-0.1089
-0.4219
-0.1503
-0.2792
-0.3035
-0.1156
[torch.FloatTensor of size 512]
), (u'layer4.0.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
1.6218e-04 -1.4720e-02 -1.7000e-02
-1.2850e-02 -3.3085e-02 -3.6656e-02
2.7812e-02 1.7691e-02 -1.8369e-02
( 0 , 1 ,.,.) =
1.0528e-02 3.1379e-02 2.4801e-02
-1.2698e-02 -2.9453e-02 -1.1834e-02
-9.4094e-03 -8.9462e-03 -3.1349e-02
( 0 , 2 ,.,.) =
-7.8447e-03 -2.9256e-02 5.3590e-03
-1.3791e-02 -1.1116e-02 5.0388e-03
-2.4919e-03 7.3514e-03 5.4013e-03
...
( 0 ,509,.,.) =
-1.0276e-03 -1.0275e-02 -2.9986e-02
-3.8465e-03 1.9549e-03 -1.6291e-02
-1.8100e-03 8.3778e-03 -8.5481e-03
( 0 ,510,.,.) =
-1.8196e-02 -1.3533e-02 -1.7457e-02
2.2457e-02 5.7402e-02 1.9325e-02
-2.4977e-02 -3.2113e-02 -8.1780e-03
( 0 ,511,.,.) =
3.6550e-03 4.9358e-03 -5.7597e-03
-1.6875e-02 1.3999e-04 3.7629e-04
-2.6272e-03 1.0947e-03 1.1145e-03
⋮
( 1 , 0 ,.,.) =
1.4018e-02 3.9198e-03 -1.7189e-03
-1.3175e-03 4.3503e-04 -1.1798e-02
-9.8003e-03 -1.7693e-02 -1.9910e-02
( 1 , 1 ,.,.) =
-1.4957e-02 -1.9796e-02 -2.8724e-02
5.8908e-03 -1.5228e-02 -5.6715e-03
2.9284e-03 -1.8028e-02 -7.1433e-03
( 1 , 2 ,.,.) =
-1.1625e-02 -3.3804e-02 -1.0025e-02
-1.6606e-02 -5.5716e-02 -2.3204e-02
-2.5758e-02 -4.3135e-02 -2.5901e-02
...
( 1 ,509,.,.) =
-1.5007e-02 -1.4333e-02 -2.5937e-03
-2.3078e-02 -1.5820e-02 -2.2818e-03
-4.1318e-03 -8.0353e-03 -2.3236e-03
( 1 ,510,.,.) =
-1.8531e-02 -1.8004e-02 -2.8084e-02
-3.6680e-02 -6.8641e-02 -5.2469e-02
-1.1712e-02 -2.4334e-02 -1.6733e-02
( 1 ,511,.,.) =
-2.2078e-02 -2.9163e-02 -3.8717e-03
-7.0301e-03 1.6718e-02 5.4339e-03
-1.3131e-02 1.1999e-02 -1.7480e-02
⋮
( 2 , 0 ,.,.) =
-5.2378e-03 -3.4890e-03 -2.0851e-03
1.5306e-02 -2.1752e-02 -8.7682e-03
2.2460e-02 9.9175e-03 -3.3635e-03
( 2 , 1 ,.,.) =
7.4677e-03 -9.1762e-03 -9.2569e-05
1.9441e-04 1.2344e-03 -8.9978e-03
-5.1243e-04 2.1850e-04 -4.8828e-03
( 2 , 2 ,.,.) =
1.7078e-02 3.3955e-03 9.3503e-03
2.0334e-02 -1.0621e-04 -8.2017e-05
1.0706e-02 -1.8414e-03 1.0828e-02
...
( 2 ,509,.,.) =
3.2008e-02 2.3494e-02 2.5386e-02
1.9307e-02 2.3924e-02 2.8972e-02
9.9003e-03 2.0158e-02 2.2655e-02
( 2 ,510,.,.) =
-9.8395e-03 -1.1114e-02 -3.7696e-03
-2.9508e-02 -3.6956e-02 -1.8228e-02
-1.3663e-03 -2.5845e-03 1.0352e-02
( 2 ,511,.,.) =
-7.3867e-03 -2.5413e-02 -2.1942e-02
-1.6699e-02 -1.5133e-02 -1.3030e-02
-2.0090e-02 3.7970e-03 -1.0341e-02
...
⋮
(509, 0 ,.,.) =
-1.6157e-02 -1.6883e-02 -2.8328e-04
-7.7759e-03 -2.4465e-03 -1.4641e-02
2.4639e-02 3.9862e-02 2.1048e-02
(509, 1 ,.,.) =
2.4491e-03 -9.3885e-03 -1.1786e-02
2.5301e-02 2.5625e-04 7.1335e-03
2.2342e-02 1.9042e-02 7.2526e-03
(509, 2 ,.,.) =
-1.4652e-02 -2.7802e-02 -4.3564e-03
-1.7961e-02 -4.3846e-02 2.7409e-03
-4.7968e-03 -8.4231e-03 1.2070e-02
...
(509,509,.,.) =
-2.0171e-02 -3.3546e-02 -1.6728e-02
-1.7847e-02 -5.1713e-02 -2.6780e-02
-1.3145e-03 -4.3181e-03 -9.6373e-03
(509,510,.,.) =
-5.3917e-03 -2.0410e-04 2.7798e-03
-9.6882e-04 -2.5141e-02 1.4804e-02
2.8748e-02 9.0832e-03 4.2548e-02
(509,511,.,.) =
-1.5698e-02 -1.9303e-02 -9.1469e-03
-2.0025e-02 -1.1131e-02 -3.3902e-02
-5.7436e-03 -7.3640e-03 -1.0044e-02
⋮
(510, 0 ,.,.) =
-8.8612e-03 -4.5370e-03 -1.2354e-02
-5.9245e-03 -1.7058e-02 -2.8041e-02
-1.0435e-02 7.6695e-04 -1.0578e-02
(510, 1 ,.,.) =
9.5200e-03 -5.1975e-03 1.2947e-02
4.4305e-03 -2.3992e-02 -8.4569e-04
4.6608e-03 9.6787e-03 8.2174e-03
(510, 2 ,.,.) =
5.1559e-03 4.4635e-04 -7.9934e-03
3.3069e-03 1.4450e-02 8.9234e-03
6.3402e-03 1.9043e-02 1.9021e-02
...
(510,509,.,.) =
7.6964e-03 -1.3777e-02 6.0539e-03
-1.5745e-03 -2.3391e-02 -1.0052e-02
9.5183e-03 -1.2251e-02 2.2436e-03
(510,510,.,.) =
1.0375e-02 3.5875e-03 -5.7940e-04
7.0412e-03 -1.0673e-02 -4.9120e-03
-2.6034e-03 1.1306e-02 7.0696e-03
(510,511,.,.) =
-1.7509e-02 -2.3182e-02 -1.7897e-02
-1.7769e-03 1.9672e-03 -7.3220e-03
-6.6833e-03 9.8286e-03 2.0653e-03
⋮
(511, 0 ,.,.) =
2.8375e-02 -8.1936e-03 1.8009e-02
1.5829e-02 -1.3571e-02 -1.9335e-02
4.0766e-03 -1.5722e-02 -5.0620e-02
(511, 1 ,.,.) =
-5.5310e-03 -1.8996e-02 -7.9436e-03
1.3825e-03 -4.9608e-02 1.7256e-03
7.6629e-03 -7.6101e-03 1.2541e-02
(511, 2 ,.,.) =
1.8052e-02 3.1718e-02 4.2556e-03
-3.6760e-03 3.0490e-03 -1.2264e-02
-8.9404e-03 -1.6604e-02 1.6348e-03
...
(511,509,.,.) =
5.3192e-03 1.8204e-02 1.8114e-02
-6.1202e-03 1.5905e-03 2.0264e-02
-1.1471e-02 -1.5697e-02 9.0871e-03
(511,510,.,.) =
3.7707e-03 8.0599e-03 1.8290e-02
1.7257e-02 6.9638e-03 1.8746e-02
1.0751e-02 1.3663e-02 -1.0081e-03
(511,511,.,.) =
1.9711e-02 -1.4569e-02 -2.4663e-02
2.5966e-03 -2.4807e-02 9.3861e-03
-1.2876e-03 1.3974e-03 1.3434e-02
[torch.FloatTensor of size 512x512x3x3]
), (u'layer4.0.bn2.running_mean',
-0.2323
-0.2009
-0.1230
-0.1102
-0.0945
-0.1073
-0.1357
-0.1954
-0.1826
-0.1890
-0.1432
-0.1667
-0.1086
-0.1213
-0.1614
-0.1109
-0.1794
-0.1853
-0.1421
-0.1549
-0.1322
-0.1870
-0.1730
-0.1042
-0.1547
-0.1679
-0.1846
-0.1568
-0.1340
-0.0786
-0.1664
-0.1481
-0.1538
-0.1475
-0.1476
-0.1409
-0.2402
-0.0707
-0.0515
-0.1052
-0.1535
-0.2514
-0.1963
-0.1318
-0.1389
-0.1726
-0.2069
-0.1794
-0.0709
-0.1851
-0.1337
-0.0983
-0.1463
-0.1685
-0.1355
-0.1603
-0.1008
-0.1787
-0.2180
-0.1460
-0.1948
-0.1348
-0.2020
-0.1971
-0.1880
-0.0911
-0.1778
-0.1945
-0.0790
-0.2138
-0.1080
-0.1863
-0.1487
-0.1820
-0.1090
-0.1556
-0.1834
-0.1325
-0.1903
-0.1287
-0.1414
-0.0978
-0.0961
-0.1062
-0.1628
-0.1549
-0.1931
-0.1080
-0.1673
0.0160
-0.1061
-0.0820
-0.1730
-0.1498
-0.1451
-0.1533
-0.0621
-0.1445
-0.0918
-0.1844
-0.2001
-0.1960
-0.2816
-0.1050
-0.0827
-0.0967
-0.1907
-0.2045
-0.1425
-0.1722
-0.1856
-0.1481
-0.0500
-0.1964
-0.2529
-0.1305
-0.1833
-0.1010
-0.1678
-0.0904
-0.1421
-0.1829
-0.1483
-0.1604
-0.2044
-0.2470
-0.2574
0.0411
-0.1046
-0.0987
-0.1557
-0.1563
-0.1190
-0.0537
-0.1004
-0.1289
-0.1472
-0.1177
-0.1001
-0.1697
-0.1182
-0.1078
-0.0982
-0.0848
-0.1159
-0.2130
-0.1836
-0.1310
-0.1298
-0.1068
-0.1665
-0.1800
-0.1908
-0.1894
-0.1793
-0.1248
-0.1584
-0.0122
-0.1105
-0.0558
-0.1281
-0.0900
-0.1077
-0.0362
-0.1808
-0.1684
-0.1897
-0.1344
-0.1439
-0.1051
-0.1875
-0.1760
-0.0602
-0.1801
-0.1497
-0.0929
-0.1682
-0.1124
-0.2335
-0.1392
-0.1584
-0.1489
-0.0831
-0.1165
-0.1117
-0.1485
-0.2255
-0.0950
-0.3208
-0.1579
-0.2568
-0.0963
-0.1540
-0.0998
-0.1362
-0.2135
-0.2037
-0.1357
-0.1213
-0.1102
-0.1944
-0.1846
-0.1457
-0.1200
-0.1515
-0.1240
-0.1507
-0.1459
-0.1838
-0.0138
-0.1594
-0.1894
-0.1916
-0.0972
-0.1900
-0.0623
-0.1021
-0.0999
-0.2354
-0.2060
-0.1587
-0.0965
-0.0440
-0.1450
-0.2433
-0.1366
-0.1435
-0.1234
-0.0996
-0.1855
-0.1259
-0.1713
-0.2071
0.3675
-0.1830
-0.0825
-0.1592
0.4767
-0.1776
-0.2842
-0.1735
-0.2587
-0.0918
-0.0702
-0.1917
-0.2316
-0.1425
-0.2009
-0.1625
-0.0506
-0.1747
-0.1638
-0.0841
-0.0549
-0.1516
-0.2360
-0.1172
-0.1092
-0.0038
-0.1679
-0.1220
-0.0646
-0.1783
-0.1515
-0.0512
-0.0918
-0.0816
-0.1421
-0.1359
0.0083
-0.2484
-0.1884
-0.0736
-0.1139
-0.2213
-0.1780
-0.1929
-0.1703
-0.1334
-0.2096
-0.1853
-0.1166
-0.1438
-0.1881
0.1436
-0.0686
-0.1421
-0.1335
-0.1524
-0.2322
-0.2406
-0.0871
-0.1397
-0.1480
-0.1512
-0.1262
-0.1244
-0.1173
-0.1291
-0.1326
-0.1113
-0.2080
-0.1329
-0.1498
-0.1469
-0.1715
-0.1090
-0.1383
-0.0470
-0.1454
-0.1737
-0.2443
-0.1302
-0.0830
-0.1078
-0.1338
-0.1451
-0.1278
-0.1947
-0.0877
-0.1288
-0.1151
-0.1809
-0.1068
-0.1797
-0.1599
-0.1411
-0.2104
-0.1189
-0.1548
-0.1769
-0.2152
-0.2549
-0.1977
-0.1385
-0.2339
-0.2481
-0.0978
-0.0979
-0.0697
-0.1257
-0.0927
-0.1224
-0.1986
-0.1570
-0.1845
-0.1944
-0.1407
-0.1315
-0.1657
-0.1823
-0.1973
-0.1310
-0.1542
-0.1280
-0.0570
-0.1538
-0.1152
-0.1496
-0.0477
-0.1515
-0.1121
-0.1667
-0.1651
-0.0415
-0.1299
-0.1688
-0.1143
-0.0564
-0.0792
-0.2140
-0.1535
-0.3087
-0.1927
-0.1168
-0.1704
-0.1362
-0.1538
-0.0804
-0.1526
-0.1125
-0.1438
-0.0565
-0.0919
-0.1430
-0.1173
-0.2215
-0.1624
-0.2277
-0.1165
-0.1816
-0.0899
-0.0536
-0.1443
-0.1539
-0.1223
-0.1396
-0.1891
-0.1502
-0.1522
-0.1075
-0.0810
-0.1276
-0.2212
-0.0747
-0.1313
-0.1488
-0.1832
-0.1005
-0.1688
-0.2087
-0.2055
-0.0563
-0.1815
-0.0792
-0.1381
-0.0926
-0.1136
-0.2200
-0.1026
-0.1947
-0.0892
-0.1711
-0.0661
-0.1989
-0.1249
-0.1797
-0.2079
-0.1755
-0.0912
-0.3122
-0.1278
-0.1374
-0.2122
-0.1040
-0.1104
-0.2023
-0.1358
-0.1437
-0.1378
-0.1535
-0.1709
-0.2460
-0.1195
-0.1224
-0.1485
-0.2050
-0.1663
-0.1218
-0.1084
-0.0843
-0.1545
-0.1624
-0.0402
-0.0948
-0.1233
-0.1247
-0.0949
-0.1890
-0.1409
-0.1957
-0.1521
-0.1791
-0.1771
-0.0799
-0.1515
-0.2390
-0.0979
-0.1374
-0.1728
-0.0893
-0.2172
-0.1527
-0.1728
-0.1136
-0.1325
-0.2203
-0.1285
-0.1800
-0.2360
-0.1478
-0.0860
-0.1360
-0.1366
-0.1958
-0.1150
-0.1826
-0.2315
-0.1288
-0.2192
-0.0944
-0.1645
-0.1989
-0.1677
-0.1951
-0.1835
-0.0518
-0.0916
-0.1564
-0.1527
-0.1834
-0.1424
-0.0484
-0.0799
-0.0930
-0.0937
-0.1105
-0.1594
-0.1454
-0.2459
-0.0755
-0.2106
[torch.FloatTensor of size 512]
), (u'layer4.0.bn2.running_var',
1.00000e-02 *
2.4833
3.3080
2.0296
1.5249
2.0692
1.8305
2.0471
2.8226
3.0079
2.4272
2.7239
2.1119
1.6120
4.8926
1.5775
1.9033
2.0561
3.5445
2.2633
1.4559
2.1948
2.6594
2.5071
1.5912
1.9282
2.1082
1.9509
1.9900
1.6651
1.8938
2.1580
2.5958
2.0472
2.1154
1.6775
1.4313
2.8038
2.0398
2.3671
1.3760
1.9427
2.1632
2.3166
1.3648
2.5834
2.0891
1.9066
3.4593
2.1960
2.2518
2.0690
1.3641
1.6057
1.9966
2.0539
1.7946
1.7566
1.9128
2.2047
2.9701
1.7670
1.9960
2.9041
2.3745
2.3840
2.0386
2.5736
1.7321
1.6626
1.8936
3.6740
2.3555
1.7346
2.9061
1.7480
2.0982
1.6436
1.7391
2.2283
1.9045
1.5922
2.6576
1.8965
2.4633
2.2448
2.3271
2.6828
1.5013
3.4970
2.7197
2.4104
2.4977
1.8593
1.8319
2.3605
2.9364
2.0061
2.1858
2.2766
2.0778
3.7099
2.7477
2.4862
1.7150
1.6191
1.5232
3.0046
2.6621
1.8450
2.9335
1.7999
2.5333
1.8225
2.6072
2.3344
1.9952
2.7224
3.9102
1.7148
1.8970
2.6572
2.3887
2.5440
1.9029
1.8488
1.9150
2.2768
2.1362
1.5905
2.0834
2.0401
2.5575
2.2002
1.2720
1.5156
1.7273
2.4564
2.0573
1.9230
1.7903
2.1950
1.9275
1.9678
2.0337
2.0774
2.3042
2.2799
1.7380
2.5705
2.0541
3.0618
2.0408
1.8540
2.5696
1.4412
2.2202
1.8074
3.2491
2.3889
1.7946
1.9074
2.2918
1.8890
2.9527
2.1006
2.5455
2.3745
2.0723
2.0327
2.0734
2.0228
1.9176
1.7930
2.2085
1.7270
2.3272
2.2734
1.8007
3.7277
2.0109
2.5690
2.3128
2.7003
2.4481
2.0348
2.9298
1.9656
2.0298
3.2104
1.7097
2.0729
1.6681
2.9341
1.3314
2.2363
1.5633
1.8116
3.0468
2.0086
2.1300
2.8081
1.7087
3.4536
2.1716
2.4298
1.6968
2.1991
1.4881
1.9965
1.6619
2.4966
2.3971
1.9127
2.3055
2.0037
2.4586
2.4219
1.9185
2.3733
2.4952
2.1067
1.6952
2.2617
1.6901
2.5003
1.8883
1.9898
2.0216
2.0317
2.4188
1.9648
1.8298
1.8622
6.7734
1.8365
1.4915
2.3664
8.2619
2.5052
4.0331
1.8407
3.2252
3.3313
1.6555
2.0685
2.6944
1.5494
1.8364
1.8372
3.4329
3.8219
1.6332
1.5061
1.5214
2.0056
3.3673
2.1137
2.7841
1.9850
2.1473
2.3712
2.5655
2.2647
1.6206
1.6700
2.4116
1.6932
2.5522
1.6277
2.9663
3.7830
1.8506
1.7631
2.8417
2.9806
2.1214
2.1561
2.1888
2.1089
2.4743
2.3409
1.9061
1.7674
2.7206
3.4606
1.6863
1.8932
2.4011
2.2686
1.7131
2.2803
1.7171
1.9732
1.7178
1.9143
1.4510
3.2377
2.2500
1.8290
2.1394
3.0828
2.1373
2.0031
2.3608
2.8301
1.8092
2.3573
1.9300
1.8900
1.9180
2.2582
3.1516
2.6300
1.6959
2.0105
1.9393
4.1140
1.5049
3.3769
5.2802
2.8814
1.9997
2.0849
2.0606
2.1785
1.3761
2.1078
1.5782
1.8571
2.5762
3.7403
3.2722
2.1694
1.7374
1.8202
1.9531
5.2114
1.5209
1.8567
2.2269
2.0769
6.5523
1.6649
3.6942
2.0398
1.6697
2.2643
2.3169
2.5668
4.6674
1.8211
2.1373
2.0317
1.8884
1.8498
1.6197
2.4375
1.6976
1.8281
1.4417
2.8025
2.0342
2.6802
1.8525
2.3066
1.5621
2.3369
2.0752
2.5609
1.8787
3.0633
2.4343
9.0075
2.2312
2.1592
1.6924
2.0200
1.7122
2.2771
1.5618
2.9398
1.9049
2.7112
1.7003
1.6870
2.1307
1.6659
1.5115
2.2211
2.0252
1.8544
1.4517
1.3800
2.9232
1.6665
1.9171
1.6493
1.9881
2.0807
2.0759
1.2931
2.0713
1.7423
3.0200
2.7102
2.5999
1.5614
1.8196
2.0943
2.1923
2.4057
1.8049
1.5076
2.5803
1.8316
1.8238
1.6072
1.8363
2.8800
1.6225
2.2379
1.9086
2.0058
1.5964
3.0622
1.8056
2.0481
2.6230
2.5718
2.4484
4.8848
2.0584
1.7286
2.3303
2.0452
2.5861
2.1619
1.7750
1.7517
2.2799
3.7831
1.9328
3.0274
1.8237
1.9539
1.9688
2.8542
2.1648
1.7796
1.4165
2.0635
1.5512
2.4537
1.8025
1.7956
2.1426
2.3666
2.5232
1.7208
1.4933
2.9103
2.3218
1.7705
2.0426
1.5930
2.3843
2.4137
1.5038
2.4345
1.9328
2.5741
1.9144
2.4423
1.5700
2.3361
1.8594
1.7644
2.2995
1.8335
3.9936
1.6851
3.1330
1.8009
2.0876
2.8069
2.4640
1.9396
1.5216
1.3678
2.1538
1.5096
1.6284
1.9524
1.8641
2.0955
2.0575
1.4833
1.9324
1.9538
1.8318
1.9908
2.0339
2.1765
2.2689
2.0712
2.3893
1.8392
1.7216
1.7257
2.6570
1.5864
1.7469
[torch.FloatTensor of size 512]
), (u'layer4.0.bn2.weight', Parameter containing:
0.4474
0.5138
0.4335
0.3421
0.3855
0.3495
0.3741
0.5836
0.4327
0.5043
0.4618
0.3866
0.3498
0.4798
0.3310
0.3913
0.3880
0.5225
0.3975
0.3292
0.4151
0.4458
0.3970
0.3614
0.3914
0.4633
0.3463
0.3644
0.3272
0.4584
0.4280
0.4538
0.4030
0.4673
0.4209
0.3987
0.4233
0.3876
0.4212
0.3460
0.3522
0.3744
0.4550
0.2888
0.4590
0.4817
0.4450
0.5110
0.4052
0.4247
0.3558
0.3075
0.4462
0.4724
0.4253
0.3884
0.4492
0.3727
0.4630
0.3985
0.3512
0.3665
0.3860
0.5082
0.4022
0.3458
0.4805
0.5390
0.4223
0.4275
0.4590
0.4736
0.3673
0.5405
0.3243
0.5178
0.4743
0.3506
0.3759
0.4328
0.3867
0.4591
0.3843
0.4982
0.5288
0.3946
0.4589
0.3197
0.4676
0.4806
0.4308
0.4235
0.3284
0.3877
0.4140
0.4469
0.4041
0.4407
0.4356
0.5120
0.5059
0.4628
0.4585
0.3311
0.3424
0.4150
0.5170
0.4593
0.5228
0.4252
0.4214
0.4995
0.4098
0.5380
0.4874
0.3719
0.4649
0.4320
0.3277
0.3743
0.4360
0.4838
0.4399
0.3763
0.4150
0.5147
0.5012
0.4382
0.3655
0.4037
0.4498
0.4720
0.3914
0.3237
0.3208
0.3224
0.4291
0.4009
0.3947
0.3779
0.4349
0.4120
0.3274
0.4334
0.3740
0.4189
0.4288
0.3071
0.4260
0.3410
0.4375
0.4407
0.3750
0.5853
0.4518
0.5045
0.3005
0.4968
0.4155
0.3755
0.5514
0.4146
0.4677
0.1404
0.5001
0.4193
0.4246
0.4452
0.5109
0.4488
0.4574
0.3896
0.4145
0.4497
0.4245
0.3971
0.3957
0.4072
0.5305
0.4986
0.3733
0.4280
0.3469
0.4178
0.3766
0.4029
0.3814
0.4493
0.5132
0.4080
0.4155
0.3635
0.4391
0.3489
0.4228
0.4833
0.3494
0.4406
0.3795
0.4298
0.4910
0.3878
0.6299
0.4322
0.5436
0.4140
0.4312
0.3161
0.3612
0.3597
0.4281
0.4506
0.4294
0.3646
0.4110
0.4038
0.4098
0.3901
0.3928
0.5421
0.3629
0.4078
0.4586
0.4217
0.3953
0.3997
0.3838
0.4374
0.3576
0.4217
0.4128
0.3904
0.4137
0.5145
0.4039
0.3577
0.4429
0.5639
0.3848
0.6104
0.4482
0.6203
0.5336
0.3480
0.5401
0.6044
0.4077
0.3469
0.4281
0.4631
0.5948
0.3479
0.3689
0.3658
0.3191
0.5492
0.3410
0.5386
0.4041
0.3373
0.4186
0.5187
0.3933
0.3188
0.3502
0.3736
0.4238
0.4752
0.3322
0.5078
0.4317
0.5318
0.4413
0.5510
0.5648
0.4130
0.4017
0.4304
0.4077
0.4285
0.4360
0.3749
0.4261
0.3905
0.3030
0.3412
0.3768
0.4507
0.3127
0.4592
0.4298
0.3936
0.3106
0.3869
0.3594
0.4046
0.4722
0.4373
0.3902
0.3515
0.4448
0.4299
0.4347
0.4693
0.4807
0.2549
0.4171
0.4387
0.4156
0.3976
0.4092
0.4953
0.4824
0.3468
0.4382
0.4179
0.4668
0.3299
0.5986
0.4949
0.4167
0.4996
0.4528
0.4550
0.4945
0.3415
0.4658
0.4356
0.3976
0.5439
0.4643
0.5122
0.4669
0.4463
0.4810
0.3492
0.3961
0.3593
0.4053
0.3878
0.3959
0.5001
0.2808
0.5470
0.4448
0.4894
0.4621
0.3417
0.3485
0.5060
0.3637
0.3774
0.3248
0.4520
0.3936
0.3403
0.4660
0.4114
0.3643
0.4196
0.3903
0.5128
0.4221
0.4115
0.4240
0.3610
0.4999
0.3672
0.4721
0.4252
0.5590
0.4694
0.7322
0.5849
0.4749
0.4426
0.3934
0.3909
0.4576
0.3636
0.4146
0.4129
0.5081
0.3681
0.3652
0.4254
0.2945
0.4142
0.3145
0.4304
0.4252
0.3493
0.4257
0.5133
0.3261
0.4367
0.3637
0.3712
0.4183
0.3772
0.4418
0.4231
0.4133
0.4731
0.4955
0.4046
0.4079
0.4719
0.3875
0.4673
0.4129
0.4569
0.3530
0.4793
0.3844
0.3785
0.3343
0.4351
0.6512
0.4295
0.4122
0.3788
0.3692
0.4343
0.4214
0.3873
0.4566
0.4456
0.4107
0.4596
0.7082
0.4452
0.3515
0.4785
0.4217
0.5756
0.4312
0.4047
0.4043
0.4764
0.5489
0.4430
0.5559
0.3744
0.3951
0.4376
0.4752
0.4340
0.4399
0.3586
0.4161
0.3930
0.4599
0.4354
0.3448
0.4649
0.4442
0.4275
0.3881
0.3247
0.4909
0.3426
0.3989
0.4320
0.3363
0.3991
0.4732
0.3514
0.4736
0.4244
0.4603
0.3298
0.4357
0.4353
0.3742
0.4191
0.3880
0.4212
0.4527
0.7213
0.3969
0.5217
0.3786
0.3512
0.5318
0.4138
0.3243
0.3244
0.3652
0.4774
0.3997
0.2800
0.4562
0.4463
0.4816
0.4290
0.4399
0.4633
0.3575
0.4774
0.3105
0.4356
0.3797
0.4304
0.4261
0.3740
0.3370
0.3917
0.3637
0.4347
0.5235
0.3845
[torch.FloatTensor of size 512]
), (u'layer4.0.bn2.bias', Parameter containing:
-0.1759
-0.2156
-0.2047
-0.1695
-0.1628
-0.1473
-0.2158
-0.2905
-0.1112
-0.2196
-0.1020
-0.1549
-0.1989
-0.0445
-0.1508
-0.1920
-0.2114
-0.1655
-0.1854
-0.1733
-0.1289
-0.2376
-0.1965
-0.1965
-0.1776
-0.1774
-0.1760
-0.1546
-0.1648
-0.2599
-0.1752
-0.2498
-0.1741
-0.2410
-0.2498
-0.2938
-0.1496
-0.1578
-0.1800
-0.1851
-0.1516
-0.1345
-0.2746
-0.1248
-0.2246
-0.2531
-0.2398
-0.1859
-0.1739
-0.2393
-0.1214
-0.1803
-0.2729
-0.2617
-0.1855
-0.2316
-0.2333
-0.1860
-0.2097
-0.0692
-0.1912
-0.2078
-0.1084
-0.2810
-0.1303
-0.1654
-0.2119
-0.3641
-0.2951
-0.2384
-0.1632
-0.1892
-0.1792
-0.2031
-0.1770
-0.2738
-0.3324
-0.1725
-0.1793
-0.2638
-0.2207
-0.1609
-0.1534
-0.1414
-0.2992
-0.1450
-0.1838
-0.1779
-0.1422
-0.2198
-0.1900
-0.1580
-0.1666
-0.2490
-0.1569
-0.1718
-0.1660
-0.1972
-0.2287
-0.2366
-0.2230
-0.1543
-0.2030
-0.1431
-0.1363
-0.2015
-0.1804
-0.2093
-0.2964
-0.1984
-0.2683
-0.2216
-0.2147
-0.3404
-0.2668
-0.1890
-0.1733
-0.2226
-0.1772
-0.1698
-0.1095
-0.2180
-0.1154
-0.1654
-0.1910
-0.3535
-0.3112
-0.2161
-0.1496
-0.1667
-0.2849
-0.2207
-0.1529
-0.1807
-0.2118
-0.1869
-0.1376
-0.1770
-0.1861
-0.1969
-0.1741
-0.3011
-0.0787
-0.2017
-0.1947
-0.2247
-0.2459
-0.1058
-0.1401
-0.1213
-0.1199
-0.1760
-0.2156
-0.3307
-0.3515
-0.2366
-0.1185
-0.2155
-0.1751
-0.1892
-0.3365
-0.1598
-0.2554
0.0644
-0.2856
-0.1198
-0.1583
-0.2297
-0.3352
-0.1987
-0.2686
-0.1632
-0.2461
-0.2900
-0.2428
-0.1449
-0.1900
-0.2149
-0.1541
-0.2917
-0.2504
-0.2213
-0.0463
-0.1547
-0.1511
-0.1527
-0.1735
-0.1931
-0.1987
-0.2239
-0.2086
-0.2688
-0.1845
-0.1797
-0.1833
-0.3880
-0.1539
-0.1553
-0.1567
-0.2238
-0.1511
-0.2540
-0.2849
-0.1826
-0.2687
-0.2328
-0.2108
-0.2410
-0.1022
-0.1507
-0.1978
-0.1734
-0.2282
-0.0985
-0.1847
-0.1770
-0.1576
-0.1937
-0.1643
-0.2822
-0.1866
-0.2754
-0.2266
-0.2169
-0.1352
-0.2194
-0.1060
-0.2139
-0.1322
-0.1889
-0.2130
-0.1913
-0.2364
-0.1402
-0.2228
-0.2354
-0.1632
-0.1905
-0.1428
-0.1177
-0.2419
-0.2733
-0.2963
-0.1600
-0.3558
-0.3673
-0.2201
-0.1505
-0.2084
-0.0870
-0.2052
-0.2070
-0.1986
-0.2299
-0.0745
-0.1765
-0.1412
-0.2180
-0.1450
-0.1426
-0.1452
-0.2916
-0.0871
-0.1359
-0.2003
-0.1125
-0.2588
-0.1988
-0.2028
-0.2443
-0.0864
-0.3415
-0.2579
-0.2343
-0.3552
-0.1859
-0.1153
-0.1732
-0.1780
-0.1909
-0.2018
-0.1886
-0.2751
-0.1501
0.1165
-0.1891
-0.1845
-0.2037
-0.0339
-0.3464
-0.1956
-0.1962
-0.1537
-0.1902
-0.1431
-0.3022
-0.1780
-0.1971
-0.2118
-0.0952
-0.1711
-0.2409
-0.2184
-0.2114
-0.2042
-0.0566
-0.0700
-0.2081
-0.1872
-0.2079
-0.1540
-0.2266
-0.1981
-0.1679
-0.2022
-0.2010
-0.1051
-0.1705
-0.2139
0.0396
-0.1077
-0.2745
-0.2690
-0.2603
-0.2819
-0.1917
-0.1940
-0.2944
-0.1822
-0.2903
-0.1064
-0.2076
-0.2648
-0.3032
-0.2878
-0.1579
-0.0071
-0.2142
-0.2022
-0.1516
-0.1123
0.0246
-0.0978
-0.1382
-0.1800
-0.3214
-0.2179
-0.1369
-0.0800
0.0117
-0.1839
-0.1926
-0.1614
-0.2769
-0.1909
-0.2101
-0.2305
-0.2055
-0.2017
-0.2741
-0.1005
-0.3152
-0.1121
-0.1700
-0.1364
-0.2157
-0.2673
-0.1584
-0.1997
-0.1745
-0.1886
-0.2307
-0.2024
-0.3376
-0.2266
-0.2355
-0.2133
-0.2346
-0.2412
-0.2358
-0.1265
-0.2341
-0.1887
-0.1646
-0.1417
-0.1882
-0.1076
-0.3048
-0.1162
-0.1651
-0.2046
-0.1833
-0.3102
-0.1778
-0.1575
-0.2676
-0.1777
-0.1569
-0.1741
-0.1892
-0.3028
-0.1457
-0.2179
-0.2226
-0.1609
-0.1423
-0.2683
-0.2920
-0.1740
-0.2079
-0.1940
-0.2679
-0.1973
-0.1951
-0.1665
-0.2286
-0.1903
-0.2667
-0.4010
-0.2550
-0.1817
-0.2025
-0.1589
-0.2476
-0.0573
-0.2203
-0.2084
-0.1587
-0.1212
-0.1795
-0.3449
-0.1662
-0.2523
-0.2435
-0.2878
-0.2797
-0.1897
-0.2113
-0.1943
-0.2050
-0.1694
-0.2243
-0.2987
-0.1328
-0.1428
-0.2399
-0.1593
-0.1999
-0.3225
-0.1860
-0.1763
-0.2691
-0.2097
-0.2396
-0.1140
-0.1897
-0.1870
-0.1829
-0.2615
-0.2073
-0.1858
-0.0598
-0.1915
-0.2183
-0.2088
-0.1742
-0.2715
-0.1999
-0.2117
-0.2492
-0.1717
-0.1566
-0.1669
-0.3015
-0.1685
-0.2434
-0.2297
-0.1947
-0.2860
-0.3288
-0.2197
-0.1862
-0.1755
-0.0987
-0.1756
-0.1304
-0.1555
-0.1679
-0.2222
-0.2819
-0.2652
-0.0947
-0.2412
-0.2731
-0.2572
-0.2604
-0.2934
-0.2470
-0.1820
-0.2740
-0.1336
-0.1698
-0.1919
-0.1796
-0.2325
-0.1352
-0.1077
-0.2184
-0.1539
-0.2015
-0.3243
-0.1713
[torch.FloatTensor of size 512]
), (u'layer4.0.downsample.0.weight', Parameter containing:
( 0 , 0 ,.,.) =
5.6973e-03
( 0 , 1 ,.,.) =
2.0359e-03
( 0 , 2 ,.,.) =
1.6696e-02
...
( 0 ,253,.,.) =
8.4662e-03
( 0 ,254,.,.) =
-2.7450e-02
( 0 ,255,.,.) =
9.6710e-03
⋮
( 1 , 0 ,.,.) =
-2.7123e-02
( 1 , 1 ,.,.) =
-1.5713e-02
( 1 , 2 ,.,.) =
5.4291e-02
...
( 1 ,253,.,.) =
-2.0631e-02
( 1 ,254,.,.) =
-3.0793e-02
( 1 ,255,.,.) =
1.3228e-03
⋮
( 2 , 0 ,.,.) =
-5.2315e-02
( 2 , 1 ,.,.) =
-3.5294e-02
( 2 , 2 ,.,.) =
3.9423e-02
...
( 2 ,253,.,.) =
-3.8161e-02
( 2 ,254,.,.) =
-2.6385e-02
( 2 ,255,.,.) =
-4.4272e-02
...
⋮
(509, 0 ,.,.) =
4.9361e-02
(509, 1 ,.,.) =
4.3553e-02
(509, 2 ,.,.) =
1.0309e-02
...
(509,253,.,.) =
7.1570e-03
(509,254,.,.) =
1.4031e-03
(509,255,.,.) =
-6.6892e-02
⋮
(510, 0 ,.,.) =
5.3341e-02
(510, 1 ,.,.) =
-1.4842e-02
(510, 2 ,.,.) =
-4.8024e-02
...
(510,253,.,.) =
5.4730e-03
(510,254,.,.) =
4.2852e-02
(510,255,.,.) =
1.2923e-02
⋮
(511, 0 ,.,.) =
3.0030e-02
(511, 1 ,.,.) =
-9.1642e-03
(511, 2 ,.,.) =
9.0266e-03
...
(511,253,.,.) =
1.0095e-02
(511,254,.,.) =
-1.1120e-02
(511,255,.,.) =
-7.9560e-03
[torch.FloatTensor of size 512x256x1x1]
), (u'layer4.0.downsample.1.running_mean',
-0.1023
-0.1302
0.0169
0.0539
0.0531
-0.0650
-0.1681
-0.0962
0.0601
-0.0898
-0.0760
-0.0120
0.0480
-0.0867
-0.0415
-0.0887
-0.0378
-0.2376
-0.0965
-0.0434
0.0303
-0.2381
-0.0065
-0.0700
0.0606
0.0257
-0.1691
-0.1207
-0.1550
-0.0927
-0.0012
-0.0962
-0.0922
-0.1789
0.0146
-0.0498
0.0276
-0.1692
0.0259
0.0377
-0.0292
0.0140
-0.0638
-0.0831
0.1049
-0.0554
-0.0551
0.1332
0.0775
-0.1861
-0.0812
-0.2083
-0.0244
-0.0297
-0.0593
0.1243
-0.0475
-0.0014
-0.0069
-0.1002
-0.1040
-0.0837
0.0009
0.0259
-0.0490
-0.0631
0.0193
-0.0375
-0.0487
-0.0803
-0.1123
-0.1538
-0.1031
-0.0858
-0.0706
-0.0725
-0.0903
0.0075
-0.0850
-0.0287
0.0008
0.0249
-0.1068
-0.1237
-0.1271
0.0930
-0.0295
-0.0846
-0.0562
-0.1210
0.0103
-0.1118
-0.0407
-0.0110
-0.0512
-0.1326
0.0454
-0.1072
-0.1018
-0.1699
-0.0338
-0.0950
-0.1897
0.0623
-0.0210
0.0932
-0.0986
0.0823
-0.0911
0.0711
-0.1106
0.0176
-0.0164
-0.2472
-0.1185
-0.0477
-0.0651
0.1771
0.0150
-0.0449
-0.1536
0.0856
0.0214
-0.0775
-0.0115
-0.0189
0.0531
-0.0859
-0.0380
0.0722
-0.0279
0.0244
-0.0680
-0.0434
-0.0344
-0.1618
-0.0222
-0.0492
0.0432
-0.1546
-0.1090
-0.1352
-0.1276
0.0435
0.0177
-0.0656
-0.0611
0.2025
-0.0140
-0.1246
0.0086
-0.0182
0.0312
-0.1482
-0.1866
-0.0051
-0.0816
-0.1819
-0.0977
-0.0378
0.0002
-0.0387
-0.0635
0.1461
-0.1282
-0.0743
0.0310
-0.1228
-0.0425
0.0254
-0.0923
0.0005
-0.0166
-0.1357
-0.0625
0.0060
0.0374
-0.0008
-0.1331
-0.0414
-0.2023
-0.0162
-0.1962
-0.0725
0.0208
-0.0585
-0.1135
-0.1361
-0.1067
-0.1719
0.0145
-0.0390
0.0426
-0.1199
0.0811
-0.0991
-0.1779
-0.0845
0.0010
-0.0083
0.0078
-0.0986
-0.0941
0.0696
-0.1100
-0.1146
0.0178
-0.1711
-0.0144
-0.0282
0.0487
-0.0513
-0.0963
0.0386
-0.1037
0.0128
-0.0490
-0.0292
-0.0553
-0.1402
0.0022
-0.0791
-0.0174
0.0108
-0.0066
0.0250
0.0028
-0.0150
-0.1172
0.0335
0.0034
-0.1005
-0.1735
-0.1138
-0.0804
-0.0329
-0.0286
-0.2133
-0.0151
-0.0876
0.0146
-0.0277
-0.1421
0.0272
0.0350
-0.1483
-0.1306
-0.0596
-0.1365
-0.1003
-0.0083
-0.0906
-0.1012
-0.1426
0.0432
-0.0785
-0.0461
0.0157
0.0150
-0.1290
0.0685
-0.1478
-0.1259
-0.0573
0.0999
-0.0234
-0.1340
0.0173
0.1673
0.0693
0.0070
0.0203
-0.0508
-0.1397
-0.1292
-0.0331
0.0088
0.1208
-0.1808
-0.0149
-0.1302
0.0323
-0.0986
-0.0620
0.0781
0.0809
-0.0918
-0.0450
-0.1246
-0.0485
-0.0756
0.0692
-0.0382
-0.0063
-0.0477
-0.0603
-0.0485
-0.0355
-0.1025
-0.0634
0.1515
-0.1320
-0.0714
0.0402
-0.0342
-0.0085
0.0019
-0.0293
-0.1523
-0.0337
-0.0482
-0.0976
-0.0404
-0.0919
-0.0003
0.0222
-0.0552
-0.0686
0.0319
0.1502
-0.1174
-0.1299
0.0183
-0.0151
-0.1464
-0.0842
-0.0300
-0.0734
-0.0539
-0.1281
0.0408
-0.0897
-0.1408
0.0572
0.0280
-0.0091
-0.1038
-0.0243
-0.0847
-0.0224
-0.0027
-0.1154
-0.0466
0.0305
-0.1060
-0.0092
-0.0748
0.0004
-0.0888
-0.1423
-0.0397
-0.1643
-0.0451
0.0331
0.0008
-0.1542
0.0999
-0.0046
-0.0571
-0.0843
0.0550
-0.1814
-0.1387
-0.0335
0.0472
-0.0325
-0.0034
-0.0210
0.0393
0.0093
-0.0188
-0.0973
0.5186
0.0181
-0.0405
-0.0579
-0.0143
-0.0268
-0.0422
0.0041
-0.0778
-0.0486
0.0359
-0.1563
-0.0826
-0.1485
-0.0987
-0.0028
-0.0243
-0.0655
0.0076
-0.1397
-0.1042
-0.0823
-0.0552
0.0079
-0.0470
0.0660
-0.1063
-0.0572
-0.0552
-0.0801
-0.0892
0.0282
0.1233
-0.2059
-0.0203
-0.0241
0.0828
0.0044
-0.0312
-0.1715
0.0464
-0.0714
0.0321
-0.0967
-0.0669
-0.0344
-0.0770
0.0563
-0.1468
-0.0696
-0.0072
-0.0250
-0.0432
-0.0625
-0.0025
0.0089
-0.0822
-0.1244
0.0708
0.0160
-0.1348
-0.0627
-0.1054
0.1421
-0.0086
-0.0767
-0.1251
-0.0547
-0.1313
-0.0230
0.0155
-0.0489
-0.0013
0.0450
0.0332
-0.0467
-0.1055
0.0485
-0.1123
-0.0773
0.0066
-0.0378
-0.0175
-0.0315
0.0455
-0.1783
-0.0309
-0.0871
-0.0732
-0.0334
-0.0210
0.0869
-0.0567
0.0474
-0.1976
-0.0912
-0.1234
-0.0575
-0.0649
-0.0924
-0.0114
-0.0757
-0.1116
-0.0291
-0.0494
-0.0320
-0.1919
-0.0641
-0.0226
-0.1687
0.0051
-0.1272
0.0922
-0.0861
-0.0604
-0.1110
-0.0010
-0.0269
-0.1265
0.0806
-0.0886
0.0017
-0.0185
-0.0132
-0.0899
-0.1026
-0.0924
-0.0599
-0.0240
-0.0059
0.0808
-0.0403
-0.1129
0.0874
-0.0083
-0.1941
-0.1473
-0.0343
-0.0190
-0.0061
[torch.FloatTensor of size 512]
), (u'layer4.0.downsample.1.running_var',
1.00000e-02 *
1.4797
3.7974
2.4287
3.3282
0.9573
1.2175
3.2409
2.3881
2.1434
1.2457
0.7617
1.4534
2.5956
0.8145
1.6107
1.6402
3.2195
3.4207
2.1148
2.1447
1.3177
3.4486
3.5753
2.3377
1.7639
0.4832
2.8323
1.9312
2.7409
1.7613
2.2178
4.2271
1.6393
1.5593
1.8405
2.1813
1.7244
2.2655
3.4637
1.7027
1.8760
2.6324
2.8153
2.7661
2.3234
1.0893
1.9429
4.7713
1.4600
2.0709
1.4191
2.7877
1.1790
1.8164
2.1013
3.3222
2.2192
2.3825
2.3330
0.9719
2.0101
3.5948
1.0746
1.9807
1.4409
2.5751
2.0639
0.8093
2.4897
1.6369
2.4380
1.5503
1.5689
0.4939
2.4048
1.2735
2.3085
2.6694
2.2144
1.9275
2.1960
1.9823
1.5991
1.2718
1.6494
1.1768
1.6908
2.4666
2.2209
1.7352
1.6695
2.0054
2.4338
2.7665
1.7877
1.7486
3.7940
1.5464
2.0674
1.2233
3.3432
1.5017
1.3682
2.8803
2.8052
3.1367
1.3165
2.6028
1.1627
4.7816
1.9174
1.1349
1.8531
4.4545
1.9873
2.8756
0.8903
5.2529
3.0137
1.7425
1.3181
1.3588
0.3196
1.6511
1.0533
1.7726
1.5709
0.8342
2.8195
1.3471
2.7743
1.2433
1.3966
2.0415
3.0947
2.9389
0.6835
1.7954
1.2979
2.2241
1.7859
2.9926
1.8215
1.1935
2.3874
2.4038
2.4009
3.7302
0.9383
2.6433
1.1903
0.8586
2.9513
1.8345
1.2961
1.6569
2.6276
2.5337
1.5654
2.1501
2.2197
2.3241
1.9577
6.7689
2.4607
0.4769
2.4836
1.6700
1.9427
2.3107
1.7138
1.8203
1.8894
2.4965
2.1896
2.5463
3.6071
1.5943
1.8717
0.6850
4.1029
2.3362
1.9808
1.2859
3.2598
3.0832
2.4954
1.2277
1.7085
1.5440
2.3446
2.9087
5.9264
2.4963
1.2804
2.0356
2.2088
3.2317
4.0116
0.9682
1.0246
2.2698
3.1086
1.7329
2.7773
2.2563
1.2499
2.8079
2.1120
2.3154
1.7160
1.1038
1.0535
1.8923
1.3009
1.4156
2.7678
2.4117
2.1302
1.4714
3.6207
2.8543
1.6743
1.8916
2.6885
1.0043
1.2075
1.4134
3.3789
2.0699
1.0101
1.8902
1.5572
4.9192
1.6134
2.8751
0.9142
4.2635
2.6588
0.4665
0.9972
0.5729
4.9139
2.4250
1.6319
1.4276
1.4179
2.4507
1.2122
2.0003
2.4153
2.7940
2.5884
2.0835
1.2927
2.4182
3.4140
2.1667
0.7899
2.9858
1.3404
2.1888
0.5248
2.4414
3.3217
1.7424
1.7588
2.2876
2.5777
3.6217
1.1590
1.5665
1.6886
2.4274
3.3398
1.6618
1.9122
1.7813
1.5589
1.7732
1.7904
1.9168
1.6683
3.3678
0.4529
1.5886
1.8173
2.2744
0.7121
1.2488
2.3408
3.1028
2.7164
1.5513
1.7717
1.6643
2.9922
2.2554
1.7378
1.4135
4.6231
2.3767
1.3142
2.4729
2.3066
2.3765
0.3310
2.0579
1.2455
1.6946
2.9351
1.9246
2.4107
2.7394
1.1762
1.1401
1.7944
2.3090
2.7987
0.4324
1.2802
1.0422
2.7148
3.0546
1.4914
2.2719
0.7397
1.6942
1.0857
1.4844
1.6265
2.8345
1.9868
1.5381
1.6695
2.1697
0.9911
1.8018
1.6002
1.0949
0.5767
0.5036
2.6319
3.0716
0.9113
1.0563
1.6398
3.0490
1.3609
0.6690
1.9067
1.7289
2.5994
1.5580
2.1489
2.6740
2.5944
2.3086
3.0448
1.7901
1.6307
1.8869
0.8179
1.2594
2.8673
2.7379
1.2914
4.2257
4.4290
0.4725
1.7098
1.0509
3.4835
1.0232
1.0880
0.9897
2.4268
3.1363
2.8433
2.3923
1.8523
2.2239
2.7958
2.1271
1.8237
1.6664
2.8019
2.1324
2.8550
1.7067
1.8597
1.5267
1.9043
1.0217
2.7563
2.8792
2.0045
1.8991
1.1335
2.2008
2.1896
1.9881
1.2837
2.2065
4.3280
1.8434
1.9879
1.2119
1.9007
0.9195
2.2533
0.9538
2.9914
1.1779
1.0417
2.5136
2.4045
1.4470
2.0585
2.0260
1.3212
1.7874
1.6841
1.8557
0.7608
2.6879
1.6277
1.3738
0.5450
1.2819
3.6177
0.8542
3.0353
1.1260
3.8203
1.9922
1.6696
2.0955
1.6163
0.9182
1.0645
1.6338
2.1920
2.2267
1.4893
2.3184
1.4378
1.3713
1.8360
2.0984
2.4619
2.3726
2.2280
1.8140
2.0319
0.3983
1.9480
2.7284
2.8425
2.3305
2.8359
0.6877
1.2102
1.1683
3.8713
1.6494
2.0911
1.9379
1.4409
1.5669
0.9922
3.0646
1.4635
1.4432
4.0783
1.7921
1.8565
1.4059
2.0364
2.4347
2.1271
1.6969
1.6637
1.1990
3.1063
0.5982
2.7176
2.5192
2.5004
1.0163
2.3461
2.9223
1.0756
1.7914
0.9306
2.5531
1.6042
1.2558
1.8730
1.3725
3.0774
1.1870
3.7628
1.7584
1.7254
2.6002
1.8345
1.8618
1.3726
3.4435
1.4385
1.1154
[torch.FloatTensor of size 512]
), (u'layer4.0.downsample.1.weight', Parameter containing:
0.1694
0.3368
0.2993
0.3745
0.1513
0.1781
0.3167
0.3947
0.1858
0.2068
0.1090
0.2042
0.2955
0.0765
0.2023
0.2487
0.3295
0.3349
0.2532
0.2739
0.1661
0.3432
0.3424
0.2969
0.2226
0.0993
0.3328
0.2349
0.2894
0.2296
0.2719
0.3945
0.1990
0.2564
0.2557
0.3541
0.1848
0.2513
0.3101
0.2782
0.2109
0.2441
0.3282
0.3248
0.2499
0.1873
0.2643
0.3949
0.1962
0.2587
0.1708
0.3381
0.2238
0.2498
0.2787
0.3783
0.3445
0.2681
0.2956
0.1146
0.2688
0.3479
0.1295
0.2843
0.1552
0.3026
0.2738
0.1891
0.3568
0.2302
0.2199
0.2070
0.2119
0.0971
0.2482
0.2264
0.3555
0.3113
0.2386
0.2654
0.2975
0.2666
0.2180
0.1451
0.2460
0.1734
0.2358
0.2891
0.2091
0.1971
0.2185
0.2008
0.2461
0.3726
0.2028
0.1993
0.3652
0.2258
0.2606
0.1900
0.2764
0.2011
0.1973
0.2958
0.3222
0.4117
0.1475
0.2674
0.1928
0.3615
0.2774
0.2143
0.2688
0.4286
0.2560
0.2777
0.1339
0.5103
0.3238
0.2417
0.1529
0.1843
0.0579
0.2288
0.1797
0.2803
0.2279
0.1579
0.3196
0.1842
0.3378
0.1688
0.1654
0.3049
0.3533
0.2948
0.1140
0.2503
0.1892
0.2647
0.2405
0.3880
0.1933
0.1918
0.2511
0.2901
0.3151
0.3252
0.1296
0.2491
0.1417
0.1295
0.3062
0.2836
0.3483
0.2306
0.2741
0.2700
0.1873
0.2431
0.3526
0.3546
0.2721
0.2708
0.3065
0.0832
0.2968
0.2286
0.3276
0.2695
0.2452
0.2444
0.2857
0.3365
0.2784
0.2933
0.3397
0.2231
0.2330
0.1486
0.3846
0.3104
0.1724
0.1724
0.3466
0.2978
0.2582
0.1879
0.2419
0.2249
0.2720
0.3735
0.4259
0.3754
0.1731
0.3698
0.2349
0.2694
0.3148
0.1658
0.1181
0.2994
0.4018
0.2126
0.3864
0.2955
0.1848
0.3686
0.1972
0.3265
0.2319
0.1676
0.1756
0.2367
0.2139
0.1974
0.2561
0.2619
0.2170
0.2284
0.3486
0.4500
0.2563
0.2559
0.2814
0.1797
0.1736
0.2013
0.3411
0.2245
0.1385
0.2284
0.2230
0.2566
0.2301
0.3639
0.1380
0.2381
0.2590
0.0830
0.1863
0.1267
0.4501
0.2741
0.2590
0.2782
0.2248
0.2718
0.1949
0.1815
0.2969
0.3168
0.3389
0.2790
0.1594
0.2752
0.2947
0.2909
0.1418
0.3336
0.1953
0.2646
0.0879
0.2553
0.3335
0.1943
0.2777
0.2386
0.3676
0.3042
0.1234
0.2615
0.2548
0.3224
0.3462
0.2090
0.2142
0.2054
0.2115
0.2153
0.2163
0.2509
0.2429
0.3326
-0.0527
0.2244
0.2319
0.2674
0.1103
0.2320
0.2822
0.3234
0.2818
0.2093
0.2261
0.2900
0.3127
0.3456
0.2592
0.1677
0.3924
0.2694
0.1997
0.2973
0.3324
0.2270
0.0656
0.2964
0.1948
0.2383
0.3021
0.2510
0.3117
0.3185
0.1721
0.1867
0.1665
0.2851
0.3512
-0.0486
0.1558
0.2213
0.3281
0.3861
0.2375
0.3057
0.1178
0.2681
0.1921
0.2211
0.1679
0.2877
0.2495
0.2451
0.2678
0.2393
0.0988
0.2778
0.2465
0.1747
0.1005
0.0502
0.2809
0.2810
0.1716
0.2114
0.2213
0.2817
0.1506
0.0769
0.2381
0.2411
0.2942
0.2543
0.2556
0.3451
0.2948
0.3040
0.3204
0.2757
0.1657
0.2941
0.1301
0.1854
0.2866
0.3198
0.2127
0.3608
0.3440
0.0954
0.2586
0.1709
0.2007
0.1967
0.1972
0.1942
0.3201
0.3484
0.3437
0.3153
0.2020
0.3251
0.3227
0.3038
0.2634
0.2364
0.2492
0.3080
0.2591
0.2391
0.2720
0.2601
0.3210
0.1818
0.3526
0.3579
0.2861
0.2526
0.1642
0.2897
0.3996
0.2651
0.2031
0.2502
0.3694
0.2085
0.2804
0.2233
0.2309
0.1609
0.2369
0.2116
0.3549
0.1635
0.1642
0.3072
0.3077
0.2152
0.2821
0.2857
0.1701
0.2305
0.2134
0.3189
0.1061
0.2628
0.2608
0.1749
0.0820
0.1815
0.3566
0.1204
0.3159
0.1595
0.3790
0.3272
0.2086
0.3096
0.2253
0.1456
0.1346
0.2304
0.2913
0.2727
0.2027
0.2688
0.1958
0.2277
0.3036
0.3250
0.3000
0.3328
0.2417
0.2665
0.2473
0.0913
0.2503
0.2543
0.3710
0.3321
0.3693
0.1099
0.1701
0.1758
0.3888
0.2206
0.2766
0.2813
0.1755
0.2616
0.1544
0.2519
0.1945
0.2452
0.3405
0.2446
0.2426
0.1822
0.3002
0.3037
0.3118
0.2414
0.2326
0.1303
0.3081
0.0979
0.2776
0.2918
0.3848
0.1789
0.3622
0.3005
0.1923
0.2672
0.1663
0.2998
0.2710
0.2040
0.2565
0.2289
0.2552
0.2121
0.3532
0.2293
0.2510
0.3085
0.2368
0.3000
0.2111
0.3456
0.3422
0.1576
[torch.FloatTensor of size 512]
), (u'layer4.0.downsample.1.bias', Parameter containing:
-0.1759
-0.2156
-0.2047
-0.1695
-0.1628
-0.1473
-0.2158
-0.2905
-0.1112
-0.2196
-0.1020
-0.1549
-0.1989
-0.0445
-0.1508
-0.1920
-0.2114
-0.1655
-0.1854
-0.1733
-0.1289
-0.2376
-0.1965
-0.1965
-0.1776
-0.1774
-0.1760
-0.1546
-0.1648
-0.2599
-0.1752
-0.2498
-0.1741
-0.2410
-0.2498
-0.2938
-0.1496
-0.1578
-0.1800
-0.1851
-0.1516
-0.1345
-0.2746
-0.1248
-0.2246
-0.2531
-0.2398
-0.1859
-0.1739
-0.2393
-0.1214
-0.1803
-0.2729
-0.2617
-0.1855
-0.2316
-0.2333
-0.1860
-0.2097
-0.0692
-0.1912
-0.2078
-0.1084
-0.2810
-0.1303
-0.1654
-0.2119
-0.3641
-0.2951
-0.2384
-0.1632
-0.1892
-0.1792
-0.2031
-0.1770
-0.2738
-0.3324
-0.1725
-0.1793
-0.2638
-0.2207
-0.1609
-0.1534
-0.1414
-0.2992
-0.1450
-0.1838
-0.1779
-0.1422
-0.2198
-0.1900
-0.1580
-0.1666
-0.2490
-0.1569
-0.1718
-0.1660
-0.1972
-0.2287
-0.2366
-0.2230
-0.1543
-0.2030
-0.1431
-0.1363
-0.2015
-0.1804
-0.2093
-0.2964
-0.1984
-0.2683
-0.2216
-0.2147
-0.3404
-0.2668
-0.1890
-0.1733
-0.2226
-0.1772
-0.1698
-0.1095
-0.2180
-0.1154
-0.1654
-0.1910
-0.3535
-0.3112
-0.2161
-0.1496
-0.1667
-0.2849
-0.2207
-0.1529
-0.1807
-0.2118
-0.1869
-0.1376
-0.1770
-0.1861
-0.1969
-0.1741
-0.3011
-0.0787
-0.2017
-0.1947
-0.2247
-0.2459
-0.1058
-0.1401
-0.1213
-0.1199
-0.1760
-0.2156
-0.3307
-0.3515
-0.2366
-0.1185
-0.2155
-0.1751
-0.1892
-0.3365
-0.1598
-0.2554
0.0644
-0.2856
-0.1198
-0.1583
-0.2297
-0.3352
-0.1987
-0.2686
-0.1632
-0.2461
-0.2900
-0.2428
-0.1449
-0.1900
-0.2149
-0.1541
-0.2917
-0.2504
-0.2213
-0.0463
-0.1547
-0.1511
-0.1527
-0.1735
-0.1931
-0.1987
-0.2239
-0.2086
-0.2688
-0.1845
-0.1797
-0.1833
-0.3880
-0.1539
-0.1553
-0.1567
-0.2238
-0.1511
-0.2540
-0.2849
-0.1826
-0.2687
-0.2328
-0.2108
-0.2410
-0.1022
-0.1507
-0.1978
-0.1734
-0.2282
-0.0985
-0.1847
-0.1770
-0.1576
-0.1937
-0.1643
-0.2822
-0.1866
-0.2754
-0.2266
-0.2169
-0.1352
-0.2194
-0.1060
-0.2139
-0.1322
-0.1889
-0.2130
-0.1913
-0.2364
-0.1402
-0.2228
-0.2354
-0.1632
-0.1905
-0.1428
-0.1177
-0.2419
-0.2733
-0.2963
-0.1600
-0.3558
-0.3673
-0.2201
-0.1505
-0.2084
-0.0870
-0.2052
-0.2070
-0.1986
-0.2299
-0.0745
-0.1765
-0.1412
-0.2180
-0.1450
-0.1426
-0.1452
-0.2916
-0.0871
-0.1359
-0.2003
-0.1125
-0.2588
-0.1988
-0.2028
-0.2443
-0.0864
-0.3415
-0.2579
-0.2343
-0.3552
-0.1859
-0.1153
-0.1732
-0.1780
-0.1909
-0.2018
-0.1886
-0.2751
-0.1501
0.1165
-0.1891
-0.1845
-0.2037
-0.0339
-0.3464
-0.1956
-0.1962
-0.1537
-0.1902
-0.1431
-0.3022
-0.1780
-0.1971
-0.2118
-0.0952
-0.1711
-0.2409
-0.2184
-0.2114
-0.2042
-0.0566
-0.0700
-0.2081
-0.1872
-0.2079
-0.1540
-0.2266
-0.1981
-0.1679
-0.2022
-0.2010
-0.1051
-0.1705
-0.2139
0.0396
-0.1077
-0.2745
-0.2690
-0.2603
-0.2819
-0.1917
-0.1940
-0.2944
-0.1822
-0.2903
-0.1064
-0.2076
-0.2648
-0.3032
-0.2878
-0.1579
-0.0071
-0.2142
-0.2022
-0.1516
-0.1123
0.0246
-0.0978
-0.1382
-0.1800
-0.3214
-0.2179
-0.1369
-0.0800
0.0117
-0.1839
-0.1926
-0.1614
-0.2769
-0.1909
-0.2101
-0.2305
-0.2055
-0.2017
-0.2741
-0.1005
-0.3152
-0.1121
-0.1700
-0.1364
-0.2157
-0.2673
-0.1584
-0.1997
-0.1745
-0.1886
-0.2307
-0.2024
-0.3376
-0.2266
-0.2355
-0.2133
-0.2346
-0.2412
-0.2358
-0.1265
-0.2341
-0.1887
-0.1646
-0.1417
-0.1882
-0.1076
-0.3048
-0.1162
-0.1651
-0.2046
-0.1833
-0.3102
-0.1778
-0.1575
-0.2676
-0.1777
-0.1569
-0.1741
-0.1892
-0.3028
-0.1457
-0.2179
-0.2226
-0.1609
-0.1423
-0.2683
-0.2920
-0.1740
-0.2079
-0.1940
-0.2679
-0.1973
-0.1951
-0.1665
-0.2286
-0.1903
-0.2667
-0.4010
-0.2550
-0.1817
-0.2025
-0.1589
-0.2476
-0.0573
-0.2203
-0.2084
-0.1587
-0.1212
-0.1795
-0.3449
-0.1662
-0.2523
-0.2435
-0.2878
-0.2797
-0.1897
-0.2113
-0.1943
-0.2050
-0.1694
-0.2243
-0.2987
-0.1328
-0.1428
-0.2399
-0.1593
-0.1999
-0.3225
-0.1860
-0.1763
-0.2691
-0.2097
-0.2396
-0.1140
-0.1897
-0.1870
-0.1829
-0.2615
-0.2073
-0.1858
-0.0598
-0.1915
-0.2183
-0.2088
-0.1742
-0.2715
-0.1999
-0.2117
-0.2492
-0.1717
-0.1566
-0.1669
-0.3015
-0.1685
-0.2434
-0.2297
-0.1947
-0.2860
-0.3288
-0.2197
-0.1862
-0.1755
-0.0987
-0.1756
-0.1304
-0.1555
-0.1679
-0.2222
-0.2819
-0.2652
-0.0947
-0.2412
-0.2731
-0.2572
-0.2604
-0.2934
-0.2470
-0.1820
-0.2740
-0.1336
-0.1698
-0.1919
-0.1796
-0.2325
-0.1352
-0.1077
-0.2184
-0.1539
-0.2015
-0.3243
-0.1713
[torch.FloatTensor of size 512]
), (u'layer4.1.conv1.weight', Parameter containing:
( 0 , 0 ,.,.) =
-8.0284e-03 -5.7776e-03 6.4154e-03
5.0498e-03 -6.7796e-03 1.2691e-02
1.3331e-02 1.4523e-02 2.4522e-02
( 0 , 1 ,.,.) =
-1.9876e-03 1.2466e-02 1.0494e-02
-1.9364e-02 -1.6696e-02 -1.1857e-02
-1.1569e-02 -3.7674e-03 -3.4679e-03
( 0 , 2 ,.,.) =
-1.1440e-02 -1.3884e-02 1.1559e-03
-1.7906e-02 -2.9349e-02 -1.3876e-02
-1.4057e-02 -2.6989e-02 -2.3963e-02
...
( 0 ,509,.,.) =
-6.3040e-03 -3.1167e-03 -1.3304e-02
7.1623e-03 6.4669e-03 1.6063e-02
-1.0750e-02 -1.0480e-02 -6.1070e-03
( 0 ,510,.,.) =
7.4484e-03 6.3878e-03 -1.2579e-02
-7.7356e-03 1.8112e-03 -1.7890e-02
-2.9142e-03 7.7705e-03 -9.7314e-03
( 0 ,511,.,.) =
2.1760e-02 2.2364e-02 2.2731e-02
2.6681e-02 2.9127e-02 3.3356e-02
1.2892e-02 -3.5818e-03 5.3022e-03
⋮
( 1 , 0 ,.,.) =
-1.0597e-02 -9.1551e-03 -2.3418e-02
-1.0768e-02 -3.3171e-03 -1.8559e-02
-1.8607e-02 -4.2634e-03 -1.5591e-02
( 1 , 1 ,.,.) =
-2.6090e-02 -2.2517e-02 -3.0593e-02
-3.9406e-02 -2.6639e-02 -2.8202e-02
-2.6143e-02 -1.9647e-02 -2.1466e-02
( 1 , 2 ,.,.) =
-3.5259e-03 1.6623e-03 -6.5624e-03
-5.0597e-03 -8.7162e-04 -5.3742e-03
-7.9651e-03 -9.7778e-03 -1.0736e-02
...
( 1 ,509,.,.) =
1.8492e-02 -3.6799e-03 1.0043e-02
-5.2974e-03 -2.0757e-02 -1.5120e-02
2.1435e-02 6.4916e-03 4.7660e-03
( 1 ,510,.,.) =
-1.8810e-02 -6.0469e-04 -7.6999e-03
-1.7697e-02 -7.8692e-03 -1.6543e-02
-1.7206e-02 -2.4746e-02 -3.0270e-02
( 1 ,511,.,.) =
-3.1191e-02 -1.4363e-02 2.2032e-03
-1.2033e-02 -2.3699e-03 -1.6630e-02
-1.2905e-02 -1.5363e-02 -3.6297e-03
⋮
( 2 , 0 ,.,.) =
-3.2648e-02 -4.8158e-03 -2.0476e-02
-2.5846e-02 -1.4660e-03 -2.8170e-02
-2.6640e-02 4.3022e-03 -2.7636e-02
( 2 , 1 ,.,.) =
-6.3289e-03 -1.5401e-02 -1.3096e-03
-1.7499e-02 -2.6212e-02 -2.3646e-02
-7.3207e-03 -1.5592e-02 -8.9578e-03
( 2 , 2 ,.,.) =
8.9701e-04 -6.6914e-03 -5.3129e-03
-1.1727e-03 -1.0726e-02 -9.0103e-03
3.2311e-03 -4.5854e-03 4.3512e-03
...
( 2 ,509,.,.) =
-2.1822e-02 -3.6889e-02 -2.2588e-02
-1.3054e-02 -3.4191e-02 -2.7238e-02
-1.2383e-02 -2.3452e-02 -2.2486e-02
( 2 ,510,.,.) =
6.8177e-03 2.1561e-02 1.3674e-02
3.1192e-03 1.0660e-02 1.0409e-02
8.0477e-03 -4.6817e-03 -4.3912e-03
( 2 ,511,.,.) =
-1.1983e-02 -1.6201e-02 -2.2626e-02
-1.3461e-02 -7.0928e-03 -1.4384e-02
-2.4456e-02 1.4885e-02 1.2247e-02
...
⋮
(509, 0 ,.,.) =
-2.6347e-02 -2.9923e-02 -3.7810e-02
-1.5663e-02 -4.1126e-03 -1.1482e-02
-1.3415e-02 -1.5432e-02 -1.8204e-02
(509, 1 ,.,.) =
-3.8392e-03 -1.1093e-02 -8.0841e-04
-5.9634e-03 -5.9165e-03 -9.3332e-03
-2.2761e-03 5.4781e-03 -5.6050e-03
(509, 2 ,.,.) =
-1.8406e-03 -2.8134e-03 8.3246e-03
-1.2453e-03 2.1453e-04 7.4868e-03
1.3450e-02 3.0599e-02 2.6405e-02
...
(509,509,.,.) =
3.5268e-04 2.3897e-03 6.2558e-03
-1.4338e-02 -2.3146e-02 -1.9024e-02
-2.7306e-02 -3.0079e-02 -3.1762e-02
(509,510,.,.) =
1.4584e-02 4.3430e-03 1.2053e-02
-6.1130e-03 -2.8539e-02 -1.8268e-02
-1.6844e-02 -4.7816e-02 -2.6274e-02
(509,511,.,.) =
-1.8850e-02 -9.3396e-03 7.8905e-03
-1.5322e-03 8.3153e-03 1.7783e-02
-8.3318e-03 -1.5759e-02 -1.2061e-02
⋮
(510, 0 ,.,.) =
9.9578e-03 7.4573e-03 -1.8738e-03
-1.7752e-03 -6.8015e-04 -7.4443e-03
-1.8319e-02 -1.4264e-02 -7.1446e-03
(510, 1 ,.,.) =
7.8524e-03 -2.6520e-03 -1.7556e-02
4.5240e-03 -4.8661e-03 -1.5215e-02
-5.0211e-03 -1.1864e-02 -1.4846e-02
(510, 2 ,.,.) =
2.9163e-02 1.0344e-02 2.4736e-02
1.2012e-02 -1.0346e-02 3.5472e-03
8.2238e-03 -1.8237e-02 -5.4892e-03
...
(510,509,.,.) =
-8.8434e-03 -4.3184e-03 -5.7536e-03
7.7230e-03 -4.1936e-04 7.7260e-03
1.3536e-02 1.5705e-02 2.0893e-02
(510,510,.,.) =
1.6743e-03 1.9720e-03 2.1567e-02
-8.0074e-03 -4.6606e-03 4.0560e-03
-1.6688e-02 -1.3754e-02 -1.1708e-02
(510,511,.,.) =
-9.7959e-03 -9.4502e-03 -9.3443e-03
6.9547e-03 -3.9134e-05 6.2691e-03
-1.3193e-02 9.3272e-04 1.4579e-02
⋮
(511, 0 ,.,.) =
-1.4963e-03 5.5133e-04 1.1571e-02
1.0174e-02 1.7889e-03 1.1035e-02
7.0212e-03 1.4651e-03 1.2769e-03
(511, 1 ,.,.) =
-1.3021e-02 6.4109e-03 -1.5199e-02
2.4775e-02 2.1926e-02 3.3679e-02
2.6471e-04 -3.0235e-03 1.1690e-02
(511, 2 ,.,.) =
-2.9665e-02 -1.5314e-02 -1.7500e-02
-1.8339e-02 -2.0845e-02 -1.5494e-02
-1.6086e-03 1.0831e-02 -1.4309e-02
...
(511,509,.,.) =
-7.7044e-03 -2.1100e-02 -2.2816e-02
5.7688e-03 1.9362e-04 7.7105e-04
-6.1357e-03 9.7275e-03 -2.5464e-03
(511,510,.,.) =
1.1043e-02 2.4205e-02 3.4213e-02
2.9181e-02 2.6904e-02 4.5372e-02
-2.1594e-02 -1.1072e-03 -7.8312e-03
(511,511,.,.) =
-8.3287e-03 -7.9521e-03 -5.3358e-03
-6.2527e-04 -5.3243e-03 -8.6296e-03
3.6094e-03 -1.2544e-03 -4.3801e-03
[torch.FloatTensor of size 512x512x3x3]
), (u'layer4.1.bn1.running_mean',
-0.6163
-0.7110
-0.4544
-0.6143
-0.7875
-0.5289
-0.6214
-0.5824
-0.4067
-0.5256
-0.5125
-0.5337
-0.5658
-0.6845
-0.5146
-0.6323
-0.5391
-0.6838
-0.6577
-0.4716
-0.5249
-0.4059
-0.6028
-0.5246
-0.8913
-0.6528
-0.5048
-0.5008
-0.5067
-0.5973
-0.5524
-0.6383
-0.6782
-0.7263
-0.5725
-0.6456
-0.7138
-0.6146
-0.5346
-0.5177
-0.4593
-0.6618
-0.5113
-0.5810
-0.6244
-0.5531
-0.5563
-0.5481
-0.6089
-0.4626
-0.7311
-0.6632
-0.6316
-0.7381
-0.6137
-0.6237
-0.4964
-0.4489
-0.6428
-0.5968
-0.5530
-0.3822
-0.6110
-0.4530
-0.4679
-0.5221
-0.6356
-0.4861
-0.7772
-0.5096
-0.5782
-0.7054
-0.6667
-0.6244
-0.7292
-0.7311
-0.6557
-0.6871
-0.6767
-0.5567
-0.6059
-0.6680
-0.7162
-0.6105
-0.5778
-0.6501
-0.6248
-1.0751
-0.6764
-0.7277
-0.5838
-0.5172
-0.6578
-0.6765
-0.7828
-0.5213
-0.5852
-0.6051
-0.6174
-0.6495
-0.7123
-0.6940
-0.5532
-0.6595
-0.5406
-0.7931
-0.5718
-0.5847
-0.6132
-0.6935
-0.6868
-0.6694
-0.5328
-0.6436
-0.6820
1.0943
-0.7218
-0.5527
-0.6364
-0.7036
-0.4954
-0.7242
-0.5977
-0.4918
-0.6130
-0.5662
-0.5606
-0.5216
-0.6229
-0.6632
-0.6878
-0.5284
-0.6767
-0.6877
-0.4909
-0.5646
-0.5312
-0.5946
-0.4761
-0.4790
-0.4377
-0.5075
-0.7755
-0.6303
-0.7138
-0.6351
-0.4867
-0.6949
-0.5841
-0.5315
-0.7750
-0.4143
-0.6275
-0.7366
-0.4332
-0.5265
-0.5596
-0.7054
-0.5708
-0.6828
-0.5689
-0.6370
-0.6888
-0.6580
-0.7045
-0.5881
-0.5664
-0.7268
-0.4533
-0.5892
-0.3759
-0.5606
-0.4196
-0.6223
-0.5858
-0.6233
-0.5599
-0.6126
-0.6092
-0.5808
-0.6016
-0.6788
-0.6251
-0.5953
-0.6111
-0.5539
-0.6734
-0.7272
-0.6097
-0.5319
-0.6154
-0.6616
-0.5251
-0.7204
-0.5141
-0.5327
0.4233
-0.5529
-0.5242
-0.6593
-0.4511
-0.6349
-0.3456
-0.6631
-0.5920
-0.5973
-0.6211
-0.7120
-0.6314
-0.5040
-0.6516
-0.6550
-0.7142
-0.5808
-0.6789
-0.6201
-0.4061
-0.5925
-0.5558
-0.5571
-0.4889
-0.5365
-0.5812
-0.4340
-0.6515
-0.7659
-0.7258
-0.6003
-0.5486
-0.5736
-0.6739
-0.7918
-0.7040
-0.7296
-0.5405
-0.7658
-0.7979
-0.4340
-0.4951
-0.6078
-0.5947
-0.4997
-0.6277
-0.7792
-0.5649
-0.5368
-0.5487
-0.4484
-0.5827
-0.6600
-0.6217
-0.4814
-0.5351
-0.7170
-0.6168
-0.5111
-0.5243
-0.5335
-0.5807
-0.5547
-0.6503
-0.4587
-0.5209
-0.6062
-0.6173
-0.5516
-0.6006
-0.6239
-0.5478
-0.4163
-0.4006
-0.6529
-0.6296
-0.6285
-0.6954
-0.6962
-0.6200
-0.6101
-0.7406
-0.5545
-0.6118
-0.4561
-0.7019
-0.4928
-0.6741
-0.5356
-0.5084
-0.5760
-0.7814
-0.6067
-0.5649
-0.7067
-0.5240
-0.5461
-0.6110
-0.4931
-0.5836
-0.5622
-0.6459
-0.6308
-0.9025
-0.7854
-0.5653
-0.7293
-0.5926
-0.5592
-0.7440
-0.7482
-0.6664
-0.7048
-0.7439
-0.5878
-0.8028
-0.6192
-0.4616
-0.5855
-0.6742
-0.7888
-0.6651
-0.3683
-0.6134
-0.4842
-0.3682
-0.6536
-0.7631
-0.6092
-0.4839
-0.6507
-0.3905
-0.6862
-0.4401
-0.6525
-0.5656
-0.6717
-0.7421
-0.7229
-0.4674
-0.5751
-0.5511
-0.5586
-0.5558
-0.4956
-0.7403
-0.6855
-0.3486
-0.6537
-0.4844
-0.6349
-0.5598
-0.5948
-0.4812
-0.5334
-0.5976
-0.6261
-0.6772
-0.7516
-0.5493
-0.3516
-0.5009
-0.6872
-0.5960
-0.7015
-0.7018
-0.6167
-0.6467
-0.6626
-0.6280
-0.5683
-0.4376
-0.5755
-0.4337
-0.7240
-0.6438
-0.7462
-0.5832
-0.4346
-0.4831
-0.7087
-0.6292
-0.5748
-0.6452
-0.6257
-0.5396
-0.6186
-0.6352
-0.8040
-0.6863
-0.5090
-0.5783
-0.4635
-0.6626
-0.6057
-0.5455
-0.6903
-0.4256
-0.6280
-0.6005
-0.6254
-0.6561
-0.6311
0.0700
-0.5386
-0.6303
-0.6126
-0.5625
-0.7177
-0.7150
-0.4435
-0.5034
-0.4601
-0.7134
-0.6045
-0.6175
-0.1311
-0.4606
-0.5279
-0.4703
-0.6170
-0.5399
-0.6533
-0.4893
-0.6054
-0.5828
-0.4321
-0.5590
-0.6121
-0.4322
-0.5652
-0.5552
-0.5527
-0.7014
-0.8307
-0.7236
-0.5809
-0.5889
-0.3087
-0.5081
-0.6251
-0.5052
-0.5977
-0.4824
-0.5811
-0.5692
-0.5516
-0.7365
-0.5498
-0.6327
-0.5069
-0.7376
-0.6093
-0.4489
-0.4150
-0.8244
-0.6467
-0.6521
-0.7903
-0.5506
-0.5928
-0.5616
-0.5244
-0.7687
-0.4464
-0.5756
-0.6635
-0.8333
-0.5849
-0.6492
-0.6747
-0.5918
-0.7004
-0.5709
-0.6782
-0.5677
-0.5274
-0.6032
-0.7329
-0.7639
-0.7142
-0.5434
-0.5954
-0.3667
-0.8357
-0.5958
-0.5528
-0.7045
-0.7305
-0.5834
-0.6888
-0.6295
-0.4671
-0.4950
-0.3886
-0.7052
-0.7428
-0.4397
-0.6197
-0.6044
-0.7213
-0.7726
-0.5705
-0.5343
-0.6056
-0.5059
-0.7181
-0.6812
-0.6400
-0.6280
-0.6755
-0.6645
-0.6709
-0.6787
-0.7700
[torch.FloatTensor of size 512]
), (u'layer4.1.bn1.running_var',
0.1573
0.1215
0.1109
0.1383
0.1542
0.1192
0.1380
0.1565
0.1040
0.1227
0.1210
0.1088
0.1507
0.1229
0.1182
0.1031
0.1236
0.1420
0.2725
0.1137
0.1290
0.1197
0.1203
0.1205
0.1339
0.1468
0.0918
0.1423
0.1281
0.1053
0.1406
0.1438
0.1694
0.1591
0.1215
0.1267
0.1491
0.1544
0.1516
0.1206
0.1409
0.1487
0.1340
0.1212
0.1692
0.1352
0.1200
0.1342
0.1134
0.1254
0.1213
0.1793
0.1328
0.1295
0.1582
0.1491
0.1182
0.1430
0.1205
0.1282
0.1373
0.1034
0.1206
0.0871
0.0848
0.1330
0.2006
0.1261
0.1236
0.1034
0.1263
0.1355
0.1411
0.1224
0.1370
0.1594
0.1325
0.1217
0.1415
0.1338
0.1612
0.1261
0.1384
0.1487
0.1365
0.1152
0.1112
0.2107
0.1501
0.1499
0.1466
0.1247
0.1303
0.1796
0.1399
0.1382
0.1268
0.1446
0.1197
0.1814
0.1356
0.1555
0.1191
0.1518
0.1481
0.1677
0.1664
0.1375
0.1302
0.1575
0.1044
0.1489
0.1323
0.1657
0.1363
0.1650
0.1388
0.1572
0.1097
0.1532
0.1118
0.1582
0.1240
0.1371
0.1479
0.1441
0.1202
0.1072
0.1245
0.1441
0.1704
0.1055
0.1728
0.1529
0.1170
0.1075
0.1315
0.1389
0.1054
0.1395
0.1459
0.1136
0.1329
0.1357
0.1532
0.1205
0.1302
0.1067
0.1683
0.1299
0.1858
0.1210
0.1353
0.1287
0.1354
0.1247
0.1327
0.1452
0.1283
0.1384
0.1480
0.1390
0.1259
0.1230
0.1613
0.1562
0.1297
0.1444
0.1217
0.1486
0.1254
0.1371
0.1776
0.1259
0.1266
0.1432
0.1145
0.1126
0.1514
0.1623
0.1124
0.1401
0.1290
0.1095
0.1299
0.1432
0.1534
0.1387
0.1410
0.0994
0.1383
0.1388
0.1094
0.1412
0.1450
0.1345
0.1807
0.1669
0.1797
0.1446
0.1243
0.1500
0.1966
0.1428
0.1178
0.1329
0.1239
0.1487
0.1447
0.1233
0.1230
0.1421
0.1380
0.1261
0.1041
0.1362
0.1259
0.1976
0.1114
0.1175
0.0917
0.1185
0.1305
0.1413
0.1461
0.1311
0.1972
0.1353
0.1068
0.1346
0.1844
0.1347
0.1281
0.1329
0.1151
0.1365
0.1553
0.1578
0.1092
0.1538
0.1433
0.1616
0.1567
0.1194
0.1202
0.1098
0.1202
0.1117
0.1285
0.1747
0.1419
0.1255
0.1801
0.1364
0.1383
0.1418
0.1395
0.1644
0.1854
0.1917
0.1479
0.1276
0.1312
0.1268
0.1060
0.1379
0.1168
0.1260
0.1124
0.1565
0.1202
0.2099
0.1336
0.1320
0.1521
0.1538
0.1600
0.1196
0.1402
0.1164
0.1405
0.1027
0.1246
0.1134
0.1128
0.1233
0.1620
0.1926
0.1764
0.1251
0.1078
0.1889
0.1207
0.1188
0.1135
0.1182
0.1275
0.1302
0.1226
0.1329
0.2516
0.1389
0.1333
0.1921
0.1150
0.1574
0.1364
0.1345
0.1353
0.1439
0.1305
0.1363
0.2558
0.1277
0.1620
0.1404
0.1460
0.1436
0.2171
0.1201
0.1373
0.1306
0.1311
0.1308
0.1287
0.1161
0.2168
0.1480
0.1095
0.1442
0.1021
0.1147
0.1490
0.1643
0.1194
0.1053
0.1203
0.1042
0.1046
0.1572
0.1197
0.1417
0.1291
0.1438
0.1530
0.1424
0.1470
0.1178
0.1345
0.1161
0.1184
0.1133
0.1149
0.1338
0.1574
0.1732
0.1222
0.1317
0.1651
0.1592
0.1255
0.1714
0.1386
0.1274
0.1710
0.1602
0.1427
0.1191
0.1423
0.1244
0.1242
0.1345
0.1228
0.1578
0.1384
0.1870
0.1142
0.1692
0.1325
0.1416
0.1499
0.1259
0.1202
0.1246
0.1744
0.1611
0.1277
0.1228
0.1276
0.1105
0.1708
0.1367
0.1220
0.1172
0.1324
0.1184
0.2493
0.1403
0.1269
0.1254
0.1589
0.1228
0.1548
0.1620
0.1270
0.1219
0.1219
0.1449
0.1133
0.1800
0.1959
0.1330
0.1314
0.1067
0.1207
0.1086
0.1430
0.1113
0.1375
0.1652
0.1293
0.1261
0.1513
0.1269
0.1334
0.1213
0.1117
0.1366
0.1399
0.1310
0.1452
0.1476
0.1330
0.1173
0.1508
0.1389
0.1231
0.1381
0.1656
0.1119
0.1386
0.1126
0.1373
0.1122
0.1230
0.0956
0.1402
0.1565
0.1408
0.1206
0.1396
0.1236
0.1630
0.1353
0.1303
0.2084
0.1242
0.1122
0.1314
0.1330
0.1390
0.1695
0.1342
0.1609
0.1675
0.1445
0.1422
0.1566
0.1629
0.1685
0.1337
0.1291
0.1315
0.1648
0.1187
0.1312
0.1344
0.1582
0.1085
0.1148
0.1341
0.1757
0.1398
0.1169
0.1368
0.1972
0.1132
0.1293
0.1359
0.1159
0.1204
0.1141
0.1512
0.1578
0.1483
0.1096
0.1363
0.1530
0.1410
0.1818
0.1212
0.1304
0.1415
0.1201
0.1561
0.1149
0.1250
0.1123
0.1161
0.1466
0.1419
0.1714
[torch.FloatTensor of size 512]
), (u'layer4.1.bn1.weight', Parameter containing:
0.2587
0.3073
0.2595
0.3223
0.2662
0.2652
0.2575
0.2660
0.2766
0.2414
0.3045
0.2853
0.2821
0.2880
0.3094
0.3444
0.3155
0.4129
0.2110
0.2903
0.2496
0.2601
0.2967
0.3033
0.4152
0.2719
0.3661
0.3251
0.3898
0.3346
0.2753
0.2712
0.2414
0.3351
0.3394
0.3167
0.3360
0.2666
0.2109
0.2705
0.2587
0.3070
0.2720
0.2316
0.2885
0.2884
0.2955
0.3057
0.3043
0.2596
0.2673
0.1929
0.3136
0.3593
0.2622
0.2931
0.3295
0.2514
0.3208
0.2798
0.3259
0.2939
0.2390
0.3105
0.3471
0.2812
0.2148
0.2997
0.3061
0.2740
0.2791
0.3790
0.3592
0.3247
0.2995
0.2735
0.3356
0.2703
0.3255
0.3127
0.2783
0.2702
0.3900
0.2942
0.2899
0.3461
0.3432
0.4685
0.2634
0.2553
0.3019
0.3961
0.2742
0.2995
0.3858
0.2785
0.3212
0.3109
0.3642
0.2193
0.2643
0.2333
0.3151
0.3102
0.2936
0.2374
0.2419
0.2976
0.3335
0.2619
0.3984
0.2721
0.2718
0.2678
0.2757
0.2445
0.3508
0.2174
0.3309
0.2653
0.2564
0.1748
0.3177
0.2751
0.2067
0.2905
0.2762
0.3329
0.2738
0.3224
0.2199
0.2997
0.2206
0.3213
0.2760
0.3927
0.3174
0.2698
0.2988
0.2610
0.2550
0.2788
0.4445
0.2862
0.3606
0.3279
0.2869
0.3294
0.2244
0.2338
0.1754
0.2318
0.3186
0.3322
0.2255
0.3041
0.2837
0.3276
0.2392
0.3668
0.1971
0.2946
0.3613
0.2736
0.2554
0.2860
0.2511
0.3490
0.3253
0.2934
0.2027
0.2580
0.2200
0.3089
0.3074
0.3332
0.2943
0.3375
0.2330
0.2611
0.3383
0.2837
0.3546
0.3093
0.3791
0.2197
0.2648
0.2830
0.2587
0.3588
0.2830
0.3971
0.3194
0.3066
0.2754
0.2647
0.0970
0.2182
0.2334
0.2624
0.1829
0.2933
0.2747
0.3001
0.2996
0.3107
0.3256
0.2940
0.3901
0.2790
0.3030
0.2838
0.3010
0.3044
0.3479
0.3087
0.2611
0.1958
0.2941
0.2558
0.2889
0.3148
0.2516
0.2664
0.2862
0.3940
0.2933
0.2781
0.3796
0.3022
0.2583
0.3021
0.2784
0.2967
0.2994
0.3856
0.3277
0.2587
0.2539
0.2824
0.2634
0.1489
0.2205
0.3929
0.3401
0.2717
0.2789
0.2917
0.3177
0.1992
0.3684
0.3120
0.3201
0.2810
0.2302
0.2779
0.2865
0.2858
0.2713
0.1601
0.2496
0.2895
0.3154
0.3443
0.3285
0.3444
0.3251
0.3235
0.3375
0.2282
0.2128
0.1795
0.3077
0.3005
0.2775
0.3054
0.2914
0.3535
0.2871
0.2669
0.3961
0.2674
0.3898
0.3183
0.3242
0.2789
0.1911
0.2569
0.3427
0.2464
0.2778
0.2098
0.3019
0.3145
0.3271
0.2914
0.2619
0.2643
0.3039
0.2520
0.2099
0.3643
0.2915
0.1957
0.3286
0.2355
0.3210
0.2982
0.3388
0.3450
0.3716
0.2898
0.2846
0.2805
0.2219
0.2910
0.2681
0.3163
0.1964
0.3176
0.3092
0.2706
0.2505
0.2508
0.3166
0.3583
0.1563
0.2608
0.2892
0.3401
0.2891
0.3126
0.2172
0.2459
0.2651
0.4052
0.2986
0.3026
0.3773
0.2262
0.2675
0.2900
0.3759
0.3201
0.2567
0.3443
0.2348
0.3057
0.2347
0.3277
0.2938
0.2746
0.2805
0.2421
0.3590
0.2622
0.2773
0.2396
0.2134
0.2727
0.2984
0.2744
0.2591
0.2628
0.3568
0.2009
0.3220
0.2868
0.2561
0.3113
0.2138
0.3136
0.2745
0.3046
0.3042
0.1972
0.2815
0.2542
0.2983
0.2613
0.2668
0.3142
0.2930
0.3800
0.1966
0.2948
0.3363
0.2713
0.3625
0.2909
0.2695
0.3111
0.3242
0.3009
0.3231
0.3051
0.2012
0.2716
0.3692
0.2694
0.1481
0.2858
0.2819
0.2391
0.2867
0.3466
0.3431
0.2365
0.3357
0.1685
0.2925
0.3092
0.3127
0.1883
0.2561
0.3086
0.1732
0.2989
0.3235
0.2693
0.2630
0.2913
0.2786
0.3124
0.3098
0.2695
0.2403
0.2906
0.2784
0.2654
0.3485
0.3939
0.3033
0.3145
0.2622
0.1540
0.2790
0.2967
0.1954
0.2632
0.2957
0.2581
0.3231
0.2795
0.2859
0.3139
0.2488
0.2404
0.3714
0.2649
0.2267
0.2878
0.3462
0.3063
0.3180
0.1726
0.3153
0.2625
0.3020
0.2996
0.3632
0.1541
0.3192
0.2200
0.2894
0.2622
0.2534
0.2935
0.3208
0.2231
0.2743
0.3023
0.2829
0.2394
0.2506
0.3512
0.3366
0.2666
0.2930
0.3049
0.2321
0.3397
0.2727
0.2900
0.3146
0.2682
0.3094
0.3718
0.3387
0.3202
0.2423
0.2745
0.2966
0.2500
0.2329
0.3419
0.2928
0.3536
0.3739
0.1935
0.2670
0.2846
0.2583
0.3783
0.2826
0.2929
0.2728
0.3645
0.2770
0.2756
0.2523
0.2500
[torch.FloatTensor of size 512]
), (u'layer4.1.bn1.bias', Parameter containing:
-0.1668
-0.3019
-0.2187
-0.2917
-0.1971
-0.2325
-0.1869
-0.1857
-0.2474
-0.1629
-0.2448
-0.2508
-0.1895
-0.2651
-0.3250
-0.3811
-0.2953
-0.4963
-0.0294
-0.2724
-0.2007
-0.2220
-0.2945
-0.2579
-0.5152
-0.1994
-0.5016
-0.2736
-0.4528
-0.3968
-0.2281
-0.1772
-0.1293
-0.2655
-0.3252
-0.3232
-0.3337
-0.1901
-0.0692
-0.2196
-0.2132
-0.2565
-0.1646
-0.1567
-0.2087
-0.2178
-0.2480
-0.2767
-0.3071
-0.1988
-0.1985
-0.0235
-0.2458
-0.4156
-0.1660
-0.1923
-0.3328
-0.1481
-0.3047
-0.2277
-0.3182
-0.2744
-0.1643
-0.3365
-0.4050
-0.2082
-0.0621
-0.2671
-0.2809
-0.2185
-0.2148
-0.4465
-0.3376
-0.3213
-0.2921
-0.1998
-0.3369
-0.2092
-0.2831
-0.2893
-0.1719
-0.2189
-0.4016
-0.2484
-0.2070
-0.3849
-0.3753
-0.5874
-0.1637
-0.1748
-0.2217
-0.5067
-0.2496
-0.2117
-0.4291
-0.1944
-0.3089
-0.2621
-0.4096
-0.0602
-0.2009
-0.1316
-0.3336
-0.2627
-0.2320
-0.0910
-0.1560
-0.2889
-0.3286
-0.1628
-0.5128
-0.2036
-0.1726
-0.1844
-0.2285
-0.1925
-0.3432
-0.0929
-0.3138
-0.1912
-0.1926
-0.0342
-0.3268
-0.1699
-0.0828
-0.2417
-0.2069
-0.3870
-0.2210
-0.2867
-0.0526
-0.3092
-0.0655
-0.2594
-0.2160
-0.5062
-0.2905
-0.2125
-0.3124
-0.2128
-0.1946
-0.2520
-0.5475
-0.2321
-0.3350
-0.3473
-0.2158
-0.3603
-0.0759
-0.1472
-0.0327
-0.1404
-0.3128
-0.3063
-0.1120
-0.2664
-0.2700
-0.3112
-0.1519
-0.3843
-0.0645
-0.2373
-0.4227
-0.2546
-0.1611
-0.2350
-0.1524
-0.3494
-0.3453
-0.2081
-0.0918
-0.2025
-0.1246
-0.2533
-0.2768
-0.3156
-0.2530
-0.3957
-0.0981
-0.1257
-0.3697
-0.2333
-0.3664
-0.2829
-0.4320
-0.0836
-0.1583
-0.2395
-0.1818
-0.4408
-0.2376
-0.4450
-0.3232
-0.2787
-0.1858
-0.2137
0.0481
-0.1058
-0.1093
-0.2035
-0.0496
-0.2117
-0.1598
-0.2389
-0.2830
-0.2878
-0.3406
-0.2560
-0.4468
-0.2444
-0.2492
-0.2222
-0.2792
-0.3005
-0.4180
-0.2568
-0.1872
-0.0270
-0.2645
-0.1873
-0.3022
-0.3400
-0.1803
-0.1810
-0.2079
-0.4775
-0.2047
-0.1878
-0.4504
-0.2516
-0.1657
-0.2765
-0.2329
-0.2446
-0.2956
-0.4163
-0.2816
-0.1571
-0.2199
-0.2125
-0.1684
0.0356
-0.0914
-0.4484
-0.3535
-0.2212
-0.2550
-0.2509
-0.2702
-0.0599
-0.3505
-0.2924
-0.2360
-0.2339
-0.1259
-0.2597
-0.2267
-0.1978
-0.1371
-0.0129
-0.1175
-0.2527
-0.3099
-0.3231
-0.3468
-0.3553
-0.3537
-0.3315
-0.3713
-0.1091
-0.0959
-0.0258
-0.2756
-0.2808
-0.2012
-0.2812
-0.1991
-0.3948
-0.2257
-0.2469
-0.4211
-0.2110
-0.4670
-0.3069
-0.3549
-0.2337
-0.0612
-0.1321
-0.2968
-0.1870
-0.2316
-0.0686
-0.3113
-0.2895
-0.3149
-0.2686
-0.2081
-0.2096
-0.3011
-0.1810
-0.0227
-0.3873
-0.2665
-0.0225
-0.2973
-0.0973
-0.2980
-0.3219
-0.2926
-0.3196
-0.4332
-0.1980
-0.2117
-0.2302
-0.0980
-0.2344
-0.2154
-0.2921
-0.0350
-0.3361
-0.2620
-0.2188
-0.1566
-0.1795
-0.2726
-0.4103
0.0413
-0.1507
-0.2552
-0.3137
-0.2466
-0.2961
-0.0938
-0.1481
-0.2129
-0.5480
-0.2915
-0.2802
-0.5077
-0.1306
-0.1862
-0.2400
-0.4362
-0.3017
-0.1633
-0.3447
-0.1047
-0.2846
-0.1244
-0.3036
-0.2404
-0.2333
-0.2494
-0.1866
-0.3294
-0.1677
-0.2540
-0.1295
-0.0512
-0.1966
-0.2801
-0.1702
-0.1879
-0.1850
-0.3274
-0.0369
-0.2979
-0.2612
-0.1889
-0.3270
-0.1377
-0.2787
-0.2201
-0.2417
-0.2834
-0.0555
-0.2538
-0.1040
-0.2660
-0.1644
-0.1723
-0.2672
-0.2797
-0.4214
-0.0378
-0.2386
-0.3498
-0.2435
-0.4348
-0.2554
-0.1719
-0.2836
-0.3316
-0.2787
-0.2879
-0.2640
-0.0560
-0.1789
-0.4195
-0.2152
0.0567
-0.2359
-0.2249
-0.0911
-0.2644
-0.3875
-0.3317
-0.1415
-0.3425
-0.0020
-0.1941
-0.2821
-0.2809
-0.0965
-0.1841
-0.2971
-0.0173
-0.3043
-0.3013
-0.1729
-0.1872
-0.2683
-0.2033
-0.3059
-0.2939
-0.2163
-0.1889
-0.2581
-0.2296
-0.2066
-0.3462
-0.4298
-0.2600
-0.3095
-0.1800
-0.0116
-0.2124
-0.2552
-0.0523
-0.2216
-0.2605
-0.2134
-0.2867
-0.2556
-0.2275
-0.3437
-0.1698
-0.1560
-0.4120
-0.2067
-0.1159
-0.2408
-0.3093
-0.2621
-0.2593
-0.0135
-0.3099
-0.2179
-0.2766
-0.2400
-0.3934
0.0072
-0.2982
-0.0930
-0.2166
-0.1635
-0.1827
-0.2308
-0.2525
-0.0991
-0.2325
-0.2938
-0.2480
-0.0934
-0.1911
-0.3772
-0.3369
-0.1606
-0.2752
-0.3005
-0.1372
-0.2990
-0.2156
-0.2622
-0.3160
-0.1342
-0.2903
-0.3865
-0.2916
-0.3243
-0.2051
-0.2656
-0.2359
-0.1508
-0.1063
-0.3595
-0.2312
-0.3046
-0.4178
-0.0276
-0.2204
-0.2426
-0.1616
-0.4789
-0.1713
-0.2802
-0.2305
-0.4327
-0.2413
-0.1862
-0.1486
-0.1507
[torch.FloatTensor of size 512]
), (u'layer4.1.conv2.weight', Parameter containing:
( 0 , 0 ,.,.) =
2.8729e-04 4.2632e-03 -2.0266e-03
1.9513e-04 2.4381e-03 -5.8632e-03
4.4803e-03 8.6577e-03 8.5538e-04
( 0 , 1 ,.,.) =
-1.1335e-02 -1.3195e-02 -1.0305e-02
-4.9507e-03 -4.5898e-03 -3.1041e-03
-7.5883e-03 -8.3795e-03 -8.9239e-03
( 0 , 2 ,.,.) =
-1.1914e-02 -1.2104e-02 -1.0167e-02
-1.2093e-02 -1.1557e-02 -8.9600e-03
-1.2515e-02 -9.3296e-03 -6.4079e-03
...
( 0 ,509,.,.) =
-9.3573e-03 -1.0662e-02 -1.2672e-02
-8.0600e-03 -8.5423e-03 -1.2121e-02
-8.1498e-03 -8.8037e-03 -1.0611e-02
( 0 ,510,.,.) =
4.2632e-03 5.6461e-03 2.8460e-03
4.7070e-03 6.2550e-03 7.5862e-03
1.1504e-02 1.1518e-02 1.0728e-02
( 0 ,511,.,.) =
-6.2455e-03 -9.1693e-03 -9.6664e-03
-4.2935e-03 -6.5311e-03 -5.0513e-03
-3.1141e-03 -5.0124e-03 -5.8122e-03
⋮
( 1 , 0 ,.,.) =
2.7483e-03 3.7146e-04 3.3262e-05
-4.5675e-03 -6.6689e-03 -6.4447e-03
-6.7610e-03 -7.3204e-03 -9.5855e-03
( 1 , 1 ,.,.) =
-1.4630e-02 -1.2320e-02 -1.4457e-02
-8.6197e-03 -5.8059e-03 -1.1075e-02
-6.2154e-03 -6.8218e-03 -9.3805e-03
( 1 , 2 ,.,.) =
1.0879e-03 4.3850e-04 -1.9456e-03
-1.2517e-03 3.2917e-04 -2.1435e-03
4.8136e-03 2.5333e-03 5.1504e-03
...
( 1 ,509,.,.) =
2.4644e-02 1.7434e-02 2.0734e-02
2.3101e-02 1.3487e-02 2.0728e-02
1.9381e-02 1.5243e-02 1.7340e-02
( 1 ,510,.,.) =
1.2212e-02 1.2448e-02 1.5048e-02
5.2993e-03 4.0090e-03 9.3927e-03
6.6766e-03 2.4941e-03 8.3288e-03
( 1 ,511,.,.) =
3.1040e-02 2.8243e-02 3.2319e-02
3.8608e-02 3.3099e-02 3.8652e-02
2.5839e-02 2.6524e-02 2.4995e-02
⋮
( 2 , 0 ,.,.) =
-2.1761e-03 4.5553e-03 2.0612e-03
4.9747e-03 1.1420e-02 8.5734e-03
4.8583e-03 1.1469e-02 1.0039e-02
( 2 , 1 ,.,.) =
-6.2547e-05 6.5336e-04 9.4747e-04
5.0603e-03 7.7136e-03 6.5484e-03
-4.8432e-04 2.3057e-03 2.9219e-03
( 2 , 2 ,.,.) =
-3.2788e-02 -2.7615e-02 -3.2608e-02
-3.6296e-02 -2.8170e-02 -3.0277e-02
-3.6814e-02 -3.1547e-02 -3.0231e-02
...
( 2 ,509,.,.) =
-5.2998e-03 -2.8590e-04 -4.9266e-03
-7.0530e-03 -2.3684e-04 -1.5838e-03
-6.9291e-03 4.8084e-04 -3.1548e-03
( 2 ,510,.,.) =
1.1854e-02 8.4836e-03 1.3839e-02
2.8741e-03 -9.7358e-05 4.4888e-03
-2.5515e-03 -2.7788e-03 -3.2464e-03
( 2 ,511,.,.) =
-1.2408e-02 -1.5001e-02 -1.3377e-02
-1.4540e-02 -1.8537e-02 -1.7392e-02
-6.7315e-03 -9.5205e-03 -9.0692e-03
...
⋮
(509, 0 ,.,.) =
3.0369e-03 1.9542e-03 1.7140e-03
-7.6240e-03 -2.8765e-03 -5.1760e-03
-9.3019e-03 -4.8800e-03 -4.2932e-03
(509, 1 ,.,.) =
4.4836e-03 2.4909e-03 1.5746e-03
1.2065e-02 1.2936e-02 1.0344e-02
1.9010e-02 1.7459e-02 1.5988e-02
(509, 2 ,.,.) =
-1.4914e-03 -8.1727e-03 -8.0671e-03
-6.6247e-03 -6.2421e-03 -9.2717e-03
-8.7991e-03 -7.7528e-03 -8.6336e-03
...
(509,509,.,.) =
-1.8040e-02 -1.5366e-02 -1.5334e-02
-1.3148e-02 -1.2180e-02 -1.0915e-02
-1.4545e-02 -1.4756e-02 -1.1787e-02
(509,510,.,.) =
3.5762e-03 6.6073e-03 -1.4055e-03
4.3975e-03 7.8375e-03 8.8085e-05
-5.0697e-03 -5.6633e-04 -5.9284e-03
(509,511,.,.) =
-1.9234e-03 -8.8012e-03 -5.8821e-03
3.6685e-03 -1.3784e-03 -3.2117e-03
-4.7037e-04 1.5340e-04 -3.4046e-03
⋮
(510, 0 ,.,.) =
-1.8305e-02 -1.7735e-02 -2.1683e-02
-1.6598e-02 -1.2508e-02 -2.0530e-02
-1.0800e-02 -9.8670e-03 -1.7195e-02
(510, 1 ,.,.) =
2.0721e-02 2.2466e-02 2.5049e-02
1.8682e-02 1.3160e-02 2.3696e-02
2.2104e-02 1.7261e-02 2.4877e-02
(510, 2 ,.,.) =
-5.7091e-03 -2.6876e-03 -9.2260e-04
-9.4530e-03 -7.0543e-03 -6.2770e-03
-4.5806e-03 -2.7182e-03 -2.5823e-03
...
(510,509,.,.) =
2.4150e-02 1.4002e-02 1.6559e-02
2.1363e-02 1.4359e-02 1.5854e-02
2.5786e-02 2.7233e-02 2.5104e-02
(510,510,.,.) =
-4.6450e-03 1.2419e-03 -1.8768e-03
1.3005e-03 4.0888e-03 -6.5483e-04
-7.9783e-03 -6.6539e-03 -8.9957e-03
(510,511,.,.) =
1.1494e-02 2.6621e-02 1.5649e-02
6.5960e-03 1.7290e-02 7.5466e-03
-8.0256e-03 4.6246e-03 -5.7808e-03
⋮
(511, 0 ,.,.) =
1.4232e-02 1.1769e-02 9.4342e-03
6.2592e-03 5.1087e-03 2.3311e-03
-1.9694e-03 2.7110e-03 -2.8945e-03
(511, 1 ,.,.) =
-7.0772e-03 1.0365e-03 -5.8451e-03
-9.1879e-03 -3.1388e-03 -8.1517e-03
-8.0300e-03 -5.1313e-03 -9.5734e-03
(511, 2 ,.,.) =
2.4314e-02 1.8942e-02 2.4256e-02
2.0090e-02 1.1472e-02 1.5993e-02
2.2910e-02 2.0622e-02 2.3820e-02
...
(511,509,.,.) =
-1.6375e-02 -1.6928e-02 -1.9019e-02
-9.7367e-03 -1.1274e-02 -1.0261e-02
-1.2310e-02 -1.5931e-02 -1.4151e-02
(511,510,.,.) =
4.7098e-03 -4.5205e-04 2.8042e-03
2.1428e-03 -4.6175e-03 -1.6818e-03
-1.3336e-03 -5.5009e-03 -2.6237e-03
(511,511,.,.) =
-1.4367e-02 -1.3520e-02 -1.1387e-02
-4.7420e-03 -1.7309e-03 -2.6426e-03
5.1448e-03 7.0428e-03 5.0202e-03
[torch.FloatTensor of size 512x512x3x3]
), (u'layer4.1.bn2.running_mean',
1.00000e-02 *
-1.3953
-5.0031
-4.3323
-1.5914
-4.6112
-2.3473
-2.5429
-3.3783
5.1665
-3.5325
-3.7555
-1.3353
-3.4012
-0.2871
-4.3814
-2.6424
-3.0987
-9.1183
1.5193
-3.9889
0.3516
-6.3124
-2.8069
-3.8168
-0.5747
-1.9639
-2.0180
-4.1006
-3.7068
-4.9702
-2.0847
-3.5155
-2.2799
-3.5089
-2.5835
-2.6871
-3.3089
-5.3280
-2.9554
-2.7207
-7.9410
-3.8662
-7.0901
-0.4792
-4.0081
-4.4518
-0.9688
3.9220
-4.1579
-3.5060
-2.7755
-0.7651
-4.4367
-2.6813
-1.2360
-3.5112
-3.1672
-2.4467
-6.0395
-4.6648
-5.3290
-2.8216
-5.4557
-5.4704
-2.8591
-5.0634
0.0627
-5.1950
-5.1578
-3.6758
-3.0772
-3.9569
-1.9722
-4.3900
-4.4507
-5.3416
-4.3945
-2.3374
-5.1497
-4.4268
-3.9613
0.7135
-3.1644
3.4458
-3.8945
-3.8628
-2.9412
-3.6697
-3.3454
-0.3520
-2.3919
-1.5737
-1.8832
-3.6160
-3.0676
-3.5423
-3.6338
-6.0085
-2.4744
-1.0668
-6.4177
-4.9577
-5.2484
-5.4054
-0.5603
-1.1281
-5.2175
-4.2486
-2.8311
-3.6422
-0.5653
-2.8168
-2.9531
-3.7204
-6.7556
-3.1953
-4.5689
4.5595
-3.0731
-2.3350
-3.2033
-1.5946
-4.3791
0.4781
0.0364
-3.1503
-2.3126
-3.7362
-4.6890
-2.7394
-5.6134
-4.9824
-4.3524
-2.7824
-7.0928
-6.0755
-5.0579
-2.8704
-2.6274
-3.3160
-0.8618
-3.9907
-3.5256
-8.6042
-2.6312
-3.2020
-1.4972
-1.1586
-3.8802
-6.7321
-4.5730
-1.3368
-2.9202
-1.5672
-5.5057
-4.7705
-2.6542
-1.3914
-1.9433
-3.6511
-5.1134
-4.3920
-3.3364
4.1027
-3.2706
-0.0820
-1.9290
-4.7500
-4.4132
-3.8169
-2.4048
-4.1317
-2.0381
-3.9825
-4.3505
-1.3664
-3.0153
-3.2162
-5.1351
-4.3963
-0.1310
-2.0620
-4.1151
-7.4645
-1.3569
-4.2029
-3.6559
-5.5496
-2.4927
-4.3322
-2.5386
-0.5925
0.9121
-4.6321
-4.9662
-1.3392
-4.9198
-2.2978
-1.3565
-4.4778
-4.1518
-5.3186
-6.2418
-3.3953
-1.8224
-4.7834
-2.5541
-1.6724
-6.6561
-5.5189
-2.9102
0.1744
-2.8708
-4.7356
-3.9403
-5.2492
-4.8850
-3.3341
-2.7483
-6.1212
-4.5193
-3.5821
0.8330
-1.7342
0.1103
-6.2373
-2.4603
-7.2638
2.1412
-7.5782
-3.2325
-2.4850
-2.2635
-0.8499
-2.4250
0.6696
-1.6815
-5.7800
-4.0070
-2.5381
-2.6095
-3.6127
-4.1404
-4.7404
-0.1727
-5.8207
-6.2922
-4.1185
-2.7714
-2.6942
-1.9350
-0.2645
-4.6726
-4.4284
-2.1652
-4.8506
-4.0399
-3.5572
-4.6054
-3.1532
-3.2670
-4.3606
-5.3407
-2.9613
-6.5983
-3.6677
-1.5673
-4.1916
0.7200
-2.6574
-2.9427
-4.6752
-1.6942
-1.7730
-3.1830
-2.0861
-4.2271
-3.7406
-3.8363
-4.3299
0.7099
-1.6024
-1.1558
-1.3649
-1.9286
-0.5381
-3.8080
-4.0525
-3.7919
-3.8805
-7.5134
-1.3963
0.2917
-1.8857
-1.8787
-1.8889
-3.9999
-4.7723
-1.5847
-3.8556
-0.3824
-4.1886
-2.2822
-2.7051
-1.2578
-1.2243
-1.0389
-3.0908
-4.9441
-5.7127
-4.3721
-2.9496
-2.8846
-3.4347
-3.3969
-1.8485
-2.0259
-0.6510
-3.2701
-1.2320
-4.2393
-4.4799
-2.5397
-3.7255
-3.0126
-3.3043
1.1193
-3.4983
-3.1577
-3.6480
-1.0290
2.8495
-3.6363
-0.4754
-5.7458
-1.7357
-0.9672
-2.6240
-3.2853
-0.1027
-5.6348
-1.9716
-6.3584
1.1124
-2.9937
-2.5287
-3.9320
0.2933
-4.8010
-3.1270
-3.5449
-2.9562
-1.7369
-5.0306
-4.5947
-1.7834
-0.6932
-0.3274
-4.8955
-3.5086
-2.4075
-2.1984
-0.7037
-3.7546
-3.1797
-2.4134
-3.3352
-3.2565
-1.6909
-2.2290
-3.9201
-1.0906
-5.6042
-4.8766
-3.0840
-3.4916
-3.8825
-4.3324
-5.6847
1.0243
-4.6514
-1.0452
-0.2154
-4.4856
-3.6066
-1.7105
-0.5236
-2.8570
-5.0284
-6.5926
-1.6846
-3.1785
-6.2677
-5.5734
-3.5885
-1.4798
-4.3455
-1.3114
-3.5012
-2.1125
-7.3286
-4.1934
-1.7432
-3.5229
3.5735
-3.0858
-4.1892
-3.5874
0.1710
-1.8882
-3.2570
-6.8433
-0.2356
-4.5632
0.1103
-7.9181
-1.5563
-2.2546
-1.9013
-4.7557
-1.5476
-4.5174
-2.5230
-3.1111
-1.7632
-1.1193
-1.6986
-6.3783
-2.5520
0.9365
-2.4927
-5.2760
-6.2665
-2.4147
-5.4109
-6.4714
-1.9359
-1.4110
-4.5960
-4.2290
-2.9651
-1.1331
-4.9568
-4.5198
-4.6655
-3.9152
-7.2373
-2.8233
-3.3341
0.0050
-2.4896
-0.4391
-5.7027
-1.6781
-4.1684
-4.3151
-1.1696
-3.2351
-0.9796
-1.0248
-3.1722
-3.1369
-4.3368
-4.1376
-1.7700
-6.5839
-4.4930
-3.0312
-4.9151
-3.4421
-2.9603
-2.7210
-2.1330
-3.6309
-2.3335
-4.0678
-1.2841
-3.0524
-6.1549
-4.6466
-6.2686
-3.6889
-3.9056
-3.4740
-0.6074
-1.9422
-4.2960
-3.7847
-7.7137
-2.9199
-5.4336
-2.5864
-3.2088
-5.0267
-2.6562
-1.9347
-1.2865
-3.7129
-3.3561
-5.6942
-2.3849
-0.7705
-5.8456
0.0136
-6.6229
-2.9168
-0.3950
-5.2685
-1.9541
-5.8807
-4.5790
-3.0423
[torch.FloatTensor of size 512]
), (u'layer4.1.bn2.running_var',
1.00000e-02 *
1.2607
1.2795
1.2836
1.3783
1.2441
1.3147
1.4444
1.6157
1.2308
1.1641
1.1995
1.1215
1.2360
1.0052
1.2017
1.3942
1.3127
1.8412
1.2599
1.0842
1.2449
1.3412
1.3765
1.3621
1.2877
1.2920
1.2716
1.2023
1.4954
1.2628
1.2342
1.2743
1.2353
1.1137
1.2544
1.2324
1.4762
1.2765
1.1890
1.0633
1.2721
1.0859
1.2173
1.2396
1.3669
1.2910
1.2683
2.0786
1.2164
1.1582
1.2351
1.3041
1.0677
1.5028
1.1270
1.2414
1.5545
1.4291
1.3015
1.1886
1.1764
1.7043
1.1810
1.4155
1.1373
1.2751
1.0263
1.3853
1.5373
1.2052
1.3857
1.1107
1.1257
1.3424
1.1654
1.3275
1.2267
1.1632
1.1624
1.1821
1.1366
1.4051
1.1627
1.5670
1.0272
1.2229
1.3182
1.1980
1.1770
1.1470
0.9519
1.3395
1.2046
1.3805
1.1765
1.2712
1.1828
1.1465
1.2209
1.1313
1.2856
1.2779
1.2912
1.2170
1.1401
1.1912
1.4428
1.4182
1.3258
1.5467
1.1182
1.1008
1.1993
1.3008
1.3681
1.3370
1.1496
1.6639
1.1855
1.2463
1.2111
1.3034
1.2276
0.9981
1.2321
1.1815
1.2773
1.1727
1.1281
1.3318
1.4112
1.2649
1.0986
1.2151
1.5177
1.3746
1.2133
1.3573
1.2481
1.3561
1.0291
1.2488
1.1282
1.2459
1.3162
1.3991
1.2794
1.5236
1.1475
1.4152
1.1746
1.1560
1.4177
1.1815
1.0985
1.4292
1.3252
1.3664
1.2592
1.1000
1.2848
2.0357
1.2684
3.0875
1.6966
1.3806
1.0805
1.1598
1.2627
1.2273
1.2620
1.1659
1.2492
1.2681
1.2751
1.0844
1.4885
1.3681
1.2171
1.1670
1.3635
1.1769
1.8156
1.1138
1.2628
1.5029
1.5616
1.3909
1.1480
1.1303
1.2139
1.1750
1.5876
1.3656
1.0394
1.2946
1.2624
1.1957
1.3235
1.1265
1.1500
1.2635
1.5489
0.9593
1.4590
1.4663
1.3447
1.2776
1.2686
1.2837
1.0899
1.0744
1.1642
1.1800
1.4411
1.2817
1.1857
1.1787
1.0723
1.4497
1.3106
1.3415
1.3301
1.2676
1.1392
1.2345
1.2399
1.2312
1.3118
1.1824
1.2197
1.2423
1.2812
1.3887
1.1381
1.1887
1.0703
1.5483
1.2125
1.2822
1.1870
1.2034
1.7866
1.0217
1.2893
1.4659
1.2393
1.1389
1.1972
1.2732
1.3072
1.2994
1.2904
1.2871
1.1562
1.4855
1.2064
1.4229
1.2749
1.5885
1.3019
1.2125
1.2583
1.1958
1.3250
1.0867
1.3941
1.0751
1.2574
1.2344
1.0800
1.1533
1.3274
1.1349
1.4498
1.2250
1.2234
0.9903
1.1828
1.2083
1.5951
1.0767
1.1830
1.3225
1.1655
1.1856
1.2551
1.1889
1.2027
1.3007
1.1249
1.2834
1.3066
1.4390
1.3390
1.1616
1.3649
1.3628
1.2689
0.9673
1.3976
1.2583
1.1835
1.2000
1.4709
1.3959
1.2518
1.3496
1.2184
1.4348
1.2852
1.2958
1.3992
1.1663
1.0442
1.1392
1.3530
1.2199
1.3925
1.2103
1.0940
1.2331
1.4481
1.2432
1.1955
1.1361
1.3141
1.3357
1.0638
1.1367
1.1926
1.5863
1.3304
1.2212
1.3405
1.1748
1.0780
1.1570
1.3548
1.3191
1.1238
1.1355
1.1769
1.4076
1.0655
1.1557
1.2413
1.1456
1.1505
1.2523
1.1101
1.1558
1.1428
1.0822
1.1301
1.1807
1.2160
1.2464
1.1496
1.2547
1.4902
1.4602
1.2770
1.2263
1.4406
1.2328
1.1850
1.2651
1.3965
1.4678
1.2244
1.2105
1.2584
1.1940
1.0827
1.3151
1.1509
1.1410
1.3750
1.2897
1.4835
1.2276
1.1962
1.2476
1.3449
1.3318
1.2557
1.6294
1.2615
1.1391
1.0025
1.3623
1.2699
1.1068
1.3502
1.2616
1.1090
1.2450
1.3262
1.2724
1.6779
1.3447
1.1733
1.2772
1.3858
1.2996
1.3405
1.2329
1.2411
1.2495
1.3410
1.1526
1.1101
1.1719
1.2455
1.3591
1.0963
1.2343
1.1039
1.2518
1.1693
1.1699
1.3526
1.2257
1.2567
1.4976
1.2528
1.3554
1.1318
1.2526
1.2288
1.3581
1.1964
1.2445
1.4404
1.6388
1.2757
1.2317
1.1435
1.1726
1.3039
1.2119
1.3858
1.1201
1.2956
1.2951
1.2869
1.2629
1.6022
1.1351
1.1411
1.4286
1.2237
1.2991
1.2031
1.1916
1.0642
1.3661
1.3933
1.2715
1.4832
1.1984
1.2630
1.1473
1.3745
1.1393
1.1939
1.3160
1.3901
1.2581
1.1313
1.0755
1.1584
1.2514
1.2153
1.3047
1.1249
1.1903
1.2367
1.1338
1.2559
1.1869
1.2105
1.0223
1.5068
1.3862
1.0991
1.2486
1.2651
1.0860
1.3252
1.5014
1.2576
1.2565
1.2202
1.4279
1.0337
1.3899
1.3158
1.2282
1.4694
1.0891
1.4762
1.0859
1.0720
1.1243
1.5002
1.2772
1.1317
1.2571
1.6188
1.3516
[torch.FloatTensor of size 512]
), (u'layer4.1.bn2.weight', Parameter containing:
1.8419
1.8307
1.7650
1.8288
1.9505
1.8026
1.9536
2.2790
1.7662
1.8902
1.7768
1.7749
1.9055
1.7328
1.8762
1.8211
1.7967
2.3428
1.7985
1.7271
1.7915
1.9512
1.8928
1.9017
1.8784
1.9809
1.8569
1.7830
1.8911
1.8859
1.7764
1.9832
1.8389
1.7616
1.8728
1.8753
1.9008
1.8209
1.7039
1.7377
1.7786
1.6944
1.7829
1.7815
1.7594
1.8428
1.9238
2.0871
1.8980
1.8413
1.8471
1.8584
1.7640
1.8453
1.7606
1.9504
1.9620
1.8755
1.9424
1.8731
1.8674
1.9422
1.8750
1.9208
1.7464
1.8558
1.6539
2.0660
2.0298
1.9174
1.8972
1.7589
1.7551
1.9560
1.7909
1.7971
1.7851
1.7733
1.8061
1.7949
1.8169
1.8089
1.8641
2.1542
1.7739
1.7913
1.8022
1.7155
1.7679
1.7704
1.6266
1.8645
1.9076
1.8576
1.6924
1.8020
1.7100
1.7713
1.8572
1.7103
2.0664
1.9054
1.9422
1.8078
1.7412
1.6061
1.9105
1.8947
1.7954
1.8989
1.8239
1.7619
1.7951
1.8149
1.8539
1.8502
1.7095
2.1831
1.8599
1.8252
1.8193
1.8460
1.7968
1.6229
1.8450
1.8290
1.8706
1.9293
1.6881
1.9725
1.8981
1.8925
1.8851
1.8445
1.9764
2.0674
1.8384
1.8414
1.8762
1.7931
1.7131
1.9644
1.7854
1.9369
1.8972
1.8940
1.8700
1.7967
1.8775
1.9409
1.7391
1.7944
1.9678
1.7678
1.6851
1.9414
1.9663
1.9882
1.7915
1.8141
1.8325
2.1200
1.9256
2.3592
2.0304
1.9594
1.7334
1.9048
1.8221
1.7811
1.9084
1.8053
1.9171
1.9644
1.8256
1.6432
1.9173
1.9094
1.9923
1.7963
1.9077
1.7619
2.1724
1.7931
1.7564
1.8889
1.9832
1.9136
1.8035
1.8419
1.8278
1.8057
1.9063
1.8646
1.7848
1.8230
1.7986
1.7091
1.7724
1.7939
1.7611
1.9325
2.0162
1.7295
2.0196
1.8876
1.8325
1.8225
1.7870
1.9160
1.7197
1.7170
1.9133
1.7770
1.9943
1.8389
1.8070
1.8516
1.7857
1.9648
1.9553
1.9232
1.8086
1.8114
1.7141
1.8058
1.8532
1.9255
1.7682
1.8314
1.8495
1.8296
1.8278
1.8819
1.7698
1.7838
1.7807
1.9974
1.6994
1.9483
1.7793
1.8029
2.2210
1.6455
1.8357
2.1706
1.9204
1.7414
1.7809
1.8648
1.9145
1.8849
1.8346
1.9368
1.8169
2.2302
1.8262
2.0651
1.9888
1.8169
1.8462
1.9681
1.8083
1.8595
1.8539
1.7699
1.9001
1.7285
1.7553
1.8924
1.7829
1.9428
1.8724
1.7228
2.0548
1.7732
1.8561
1.7699
1.9269
1.8171
2.4075
1.7257
1.7819
1.7244
1.8521
1.8302
1.8797
1.7617
1.9650
1.9807
1.7102
1.7486
1.8350
1.9919
1.8505
1.9000
1.8269
1.9787
1.7635
1.6071
1.7998
1.9545
1.7348
1.7140
1.8851
1.7981
1.9100
1.8315
1.7864
1.9165
1.8839
1.9017
1.9334
1.7405
1.7661
1.8015
1.9987
1.7622
1.9107
1.8444
1.7128
1.8726
1.8529
1.9270
1.8769
1.7261
1.8393
1.9075
1.7953
1.8246
1.7605
2.0470
1.9221
1.9205
1.8910
1.7666
1.6801
1.8308
1.8845
1.8339
1.8238
1.7616
1.6114
1.8411
1.7437
1.8423
1.9540
1.7465
1.7741
1.8746
1.8856
1.7740
1.7603
1.7682
1.8396
1.6869
1.8080
1.8836
1.8283
1.8341
1.8522
1.9749
1.8707
1.7719
1.8993
1.8108
1.8480
1.8267
1.8731
1.9576
1.8347
1.9509
1.9641
1.7997
1.7652
1.9253
1.7126
1.7551
1.9427
1.8559
1.9163
1.7681
1.7803
1.8500
1.8535
1.8865
1.7599
2.0692
1.8021
1.7077
1.8890
1.9457
1.8516
1.7882
1.8356
1.8472
1.6708
1.7435
1.9080
1.9653
2.0401
1.8935
1.8450
1.7536
1.7733
1.8135
1.8534
1.9368
1.7348
1.8738
1.9632
1.9033
1.7422
1.7842
1.8516
2.0218
1.7044
1.8793
1.8655
1.8516
1.8002
1.8687
1.8460
1.7589
1.8174
1.9830
1.9034
2.1222
1.8460
1.9209
1.8893
1.9422
1.8489
1.8396
1.9953
2.0865
1.8253
1.7700
1.8035
1.7535
1.8923
1.8620
1.8627
1.7264
1.8140
1.9613
1.8812
1.8729
2.0050
1.7092
1.7726
1.9410
1.8381
1.8366
1.7276
1.8796
1.7548
1.9536
1.8062
1.8883
2.0278
1.8775
1.9446
1.8676
1.8423
1.7798
1.9403
1.8375
2.0473
1.9507
1.8337
1.8184
1.7791
1.8993
1.8781
1.8691
1.8493
1.7623
1.9458
1.7564
1.7448
1.8633
1.6863
1.8062
1.8702
2.0048
1.8504
1.8964
1.9489
1.8264
1.9019
1.8196
1.9712
1.8969
1.8652
1.8709
1.6984
1.8677
1.8846
1.9256
1.8620
1.6366
1.8434
1.7506
1.8438
1.5788
1.9316
1.9535
1.7878
1.7354
2.0920
1.9456
[torch.FloatTensor of size 512]
), (u'layer4.1.bn2.bias', Parameter containing:
0.2371
0.3433
0.3279
0.4642
0.2233
0.2370
0.2176
0.3793
0.3140
0.2803
0.2434
0.2116
0.2478
0.2435
0.2298
0.3172
0.2725
0.6511
0.2925
0.2281
0.2279
0.4254
0.2342
0.3328
0.2632
0.2176
0.3180
0.3893
0.1387
0.2274
0.3379
0.0767
0.2253
0.2504
0.1990
0.1951
0.2566
0.3253
0.2797
0.3149
0.2373
0.2533
0.1956
0.3236
0.2093
0.2333
0.2300
0.5019
0.2830
0.1885
0.3264
0.2722
0.2369
0.2430
0.3625
0.2165
0.4700
0.3047
0.3675
0.2641
0.1979
0.2664
0.3448
0.2005
0.2450
0.4351
0.2689
0.1632
0.3087
0.1209
0.2153
0.1592
0.2960
0.1423
0.2951
0.2706
0.2007
0.2939
0.2210
0.2243
0.2465
0.3910
0.4599
0.5417
0.2147
0.3469
0.2703
0.2229
0.3645
0.2647
0.2421
0.2492
0.1666
0.2763
0.2560
0.2151
0.3363
0.2767
0.2516
0.2988
0.2622
0.3499
0.3001
0.3907
0.3184
0.2233
0.2649
0.2110
0.2034
0.2752
0.2314
0.3480
0.2238
0.2892
0.1991
0.2923
0.3259
0.0722
0.3039
0.3041
0.3803
0.2568
0.2382
0.3057
0.2652
0.1532
0.2110
0.2567
0.3148
0.2746
0.1833
0.1950
0.1116
0.2279
0.3705
0.2477
0.2000
0.3060
0.2548
0.2468
0.3028
0.1921
0.2952
0.1980
0.2135
0.1583
0.1586
0.3944
0.2352
0.3947
0.2740
0.2861
0.1856
0.2702
0.2986
0.1728
0.2658
0.2696
0.2028
0.1838
0.3176
0.6246
0.2631
0.3855
0.2074
0.2317
0.4171
0.2044
0.2926
0.3506
0.2305
0.2400
0.1420
0.1093
0.2757
0.3253
0.2334
0.1650
0.4026
0.2066
0.1790
0.3032
0.5658
0.3246
0.3834
0.3254
0.1772
0.2909
0.2350
0.2519
0.1968
0.2003
0.3213
0.4802
0.2543
0.2578
0.3280
0.2270
0.3044
0.2273
0.2447
0.2527
0.4136
0.2588
0.3589
0.2688
0.2115
0.2022
0.3186
0.3740
0.1785
0.2074
0.2346
0.3566
0.2623
0.2620
0.2880
0.1462
0.1896
0.2777
0.1852
0.3240
0.2748
0.2164
0.3066
0.1845
0.3992
0.1695
0.4411
0.2812
0.2730
0.2784
0.1861
0.3589
0.1934
0.3320
0.3350
0.2655
0.2740
0.3185
0.2633
0.2458
0.2003
0.2809
0.3049
0.2050
0.2904
0.2381
0.3278
0.3484
0.4293
0.2422
0.2859
0.1864
0.2954
0.5634
0.2081
0.3743
0.2902
0.3820
0.3069
0.2101
0.2750
0.2878
0.1870
0.3015
0.1661
0.2998
0.3101
0.2522
0.2419
0.1758
0.2681
0.2812
0.1495
0.2868
0.3157
0.2587
0.2437
0.1467
0.5416
0.2490
0.2831
0.2783
0.1614
0.1963
0.2034
0.2364
0.2527
0.1573
0.3184
0.2841
0.1613
0.1489
0.2850
0.1625
0.3277
0.4936
0.2780
0.3178
0.1743
0.2158
0.2222
0.2821
0.4267
0.2713
0.1778
0.3067
0.2270
0.1772
0.3897
0.2923
0.4843
0.2345
0.2327
0.2740
0.2700
0.2804
0.4035
0.1501
0.3329
0.3286
0.2803
0.2309
0.1738
0.3270
0.3097
0.1808
0.2384
0.2107
0.3240
0.3346
0.2236
0.2061
0.2687
0.2360
0.3338
0.2694
0.3203
0.2895
0.1884
0.1491
0.3957
0.5167
0.3407
0.1854
0.1816
0.2626
0.1855
0.2219
0.1482
0.2584
0.2458
0.2616
0.2396
0.2402
0.2423
0.3463
0.2731
0.1524
0.2514
0.2760
0.1734
0.2715
0.4052
0.2252
0.3676
0.3070
0.3127
0.1836
0.4330
0.2203
0.2073
0.2803
0.2984
0.2191
0.3272
0.2267
0.2749
0.3056
0.4566
0.2962
0.3528
0.3236
0.4220
0.2715
0.2256
0.2903
0.1829
0.3994
0.2820
0.2471
0.1647
0.3654
0.4504
0.2685
0.2992
0.2825
0.2435
0.2212
0.4300
0.4342
0.1988
0.2863
0.3398
0.2444
0.2905
0.2559
0.2586
0.1702
0.1906
0.2536
0.2978
0.2498
0.3777
0.2252
0.2472
0.2243
0.1732
0.2194
0.2091
0.2820
0.2898
0.2887
0.3292
0.1644
0.2962
0.3279
0.2535
0.2795
0.2238
0.2607
0.1937
0.2680
0.2418
0.5193
0.2502
0.3147
0.2166
0.2313
0.2027
0.1880
0.2180
0.3826
0.3871
0.2358
0.3556
0.2272
0.3272
0.3442
0.3154
0.1993
0.3135
0.2254
0.3048
0.2658
0.3337
0.2679
0.2670
0.2363
0.4347
0.1931
0.1995
0.2072
0.3202
0.2667
0.2305
0.2383
0.2246
0.2562
0.2837
0.4046
0.2786
0.2243
0.1591
0.1923
0.1894
0.2496
0.1140
0.3128
0.3197
0.3530
0.2999
0.2115
0.4718
0.2979
0.3472
0.2890
0.4740
0.2230
0.3630
0.4015
0.2446
0.1897
0.1460
0.1874
0.2734
0.2366
0.3001
0.2359
0.2688
0.3256
0.2749
0.2848
0.2299
0.3001
0.4818
0.3074
0.3164
0.3114
0.3549
0.2859
[torch.FloatTensor of size 512]
), (u'fc.weight', Parameter containing:
-1.8474e-02 -7.0461e-02 -5.1772e-02 ... -3.9030e-02 1.7351e-01 -4.0976e-02
-8.1792e-02 -9.4370e-02 1.7355e-02 ... 2.0284e-01 -2.4782e-02 3.7172e-02
-3.3164e-02 -5.6569e-02 -2.4165e-02 ... -3.4402e-02 -2.2659e-02 1.9705e-02
... ⋱ ...
-1.0300e-02 3.2804e-03 -3.5863e-02 ... -2.7923e-02 -1.1458e-02 1.2759e-02
-3.5879e-02 -3.5296e-02 -2.9602e-02 ... -3.2961e-02 -1.1022e-02 -5.1256e-02
2.1277e-03 -2.4839e-02 -8.2920e-02 ... 4.1731e-02 -5.0030e-02 6.6327e-02
[torch.FloatTensor of size 1000x512]
), (u'fc.bias', Parameter containing:
1.00000e-02 *
-0.2634
0.3000
0.0656
⋮
-1.7868
-0.0782
-0.6345
[torch.FloatTensor of size 1000]
)])
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-125-430ef2cc5165> in <module>()
3 del loaded[u'conv1.weight']
4 print loaded
----> 5 mynet.load_state_dict(loaded)
6
7 print "after..............."
/Users/albertxavier/anaconda/lib/python2.7/site-packages/torch/nn/modules/module.pyc in load_state_dict(self, state_dict)
309 if name not in own_state:
310 raise KeyError('unexpected key "{}" in state_dict'
--> 311 .format(name))
312 if isinstance(param, Parameter):
313 # backwards compatibility for serialized parameters
KeyError: 'unexpected key "layer3.0.conv1.weight" in state_dict'
In [ ]:
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet152']
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000):
self.inplanes = 64
super(ResNet, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = nn.AvgPool2d(7)
self.fc = nn.Linear(512 * block.expansion, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))
return model
def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
return model
def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))
return model
def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet101']))
return model
def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))
return model
In [73]:
from graphviz import Digraph
from torch.autograd import Variable
def save(fname, creator):
dot = Digraph(comment='LRP',
node_attr={'style': 'filled', 'shape': 'box'})
#, 'fillcolor': 'lightblue'})
seen = set()
def add_nodes(var):
if var not in seen:
if isinstance(var, Variable):
dot.node(str(id(var)), str(var.size()), fillcolor='lightblue')
else:
dot.node(str(id(var)), type(var).__name__)
seen.add(var)
if hasattr(var, 'previous_functions'):
for u in var.previous_functions:
dot.edge(str(id(u[0])), str(id(var)))
add_nodes(u[0])
add_nodes(creator)
dot.save(fname)
# dot.save(fname)
# print mymodel.creator
x = Variable(torch.rand(2,3,224,224))
out = mymodel(x)
save("./mynet.dot", out.creator)
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-73-7b6e84e2b446> in <module>()
26 # print mymodel.creator
27 x = Variable(torch.rand(2,3,224,224))
---> 28 out = mymodel(x)
29 save("./mynet.dot", out.creator)
/Users/albertxavier/anaconda/lib/python2.7/site-packages/torch/nn/modules/module.pyc in __call__(self, *input, **kwargs)
200
201 def __call__(self, *input, **kwargs):
--> 202 result = self.forward(*input, **kwargs)
203 for hook in self._forward_hooks.values():
204 hook_result = hook(self, input, result)
<ipython-input-72-792fd9e0b5c0> in forward(self, x)
8 x = self.pretrained_model(x)
9 node = self.pretrained_model.layer1[0].relu
---> 10 y = self.fc(node)
11 return y
12 # x = model.layer1[0].relu
/Users/albertxavier/anaconda/lib/python2.7/site-packages/torch/nn/modules/module.pyc in __call__(self, *input, **kwargs)
200
201 def __call__(self, *input, **kwargs):
--> 202 result = self.forward(*input, **kwargs)
203 for hook in self._forward_hooks.values():
204 hook_result = hook(self, input, result)
/Users/albertxavier/anaconda/lib/python2.7/site-packages/torch/nn/modules/linear.pyc in forward(self, input)
52 return self._backend.Linear()(input, self.weight)
53 else:
---> 54 return self._backend.Linear()(input, self.weight, self.bias)
55
56 def __repr__(self):
RuntimeError: expected a Variable argument, but got ReLU
In [ ]:
Content source: albertxavier001/graduation-project
Similar notebooks: